From da059a6b0afdf971abe6ffbdc5ca4aec09c61b0d Mon Sep 17 00:00:00 2001 From: "mongodb-dbx-release-bot[bot]" <167856002+mongodb-dbx-release-bot[bot]@users.noreply.github.com> Date: Tue, 1 Oct 2024 23:09:24 +0000 Subject: [PATCH 001/182] BUMP 4.11.0.dev0 Signed-off-by: mongodb-dbx-release-bot[bot] <167856002+mongodb-dbx-release-bot[bot]@users.noreply.github.com> --- pymongo/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pymongo/_version.py b/pymongo/_version.py index c0232ba514..3de24a8e14 100644 --- a/pymongo/_version.py +++ b/pymongo/_version.py @@ -18,7 +18,7 @@ import re from typing import List, Tuple, Union -__version__ = "4.10.1" +__version__ = "4.11.0.dev0" def get_version_tuple(version: str) -> Tuple[Union[int, str], ...]: From 2a83349f7159c0117848cf3ab1a67b6ad7d6cf0d Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Wed, 2 Oct 2024 11:34:43 -0500 Subject: [PATCH 002/182] PYTHON-4812 Update changelog for 4.9.2 and 4.9.1 [master] (#1892) --- doc/changelog.rst | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/doc/changelog.rst b/doc/changelog.rst index 76e91c2b27..574ecad763 100644 --- a/doc/changelog.rst +++ b/doc/changelog.rst @@ -36,6 +36,36 @@ in this release. .. _PyMongo 4.10 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=40553 +Changes in Version 4.9.2 +------------------------- + +- Fixed a bug where :class:`~pymongo.asynchronous.mongo_client.AsyncMongoClient` could deadlock. +- Fixed a bug where PyMongo could fail to import on Windows if ``asyncio`` is misconfigured. +- Fixed a bug where :meth:`~pymongo.results.UpdateResult.did_upsert` would raise a ``TypeError``. + +Issues Resolved +............... + +See the `PyMongo 4.9.2 release notes in JIRA`_ for the list of resolved issues +in this release. + +.. _PyMongo 4.9.2 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=40732 + + +Changes in Version 4.9.1 +------------------------- + +- Add missing documentation about the fact the async API is in beta state. + +Issues Resolved +............... + +See the `PyMongo 4.9.1 release notes in JIRA`_ for the list of resolved issues +in this release. + +.. 
_PyMongo 4.9.1 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=40720 + + Changes in Version 4.9.0 ------------------------- From af23139b4ab7aeba5da71b571809cac6474391a1 Mon Sep 17 00:00:00 2001 From: Iris <58442094+sleepyStick@users.noreply.github.com> Date: Thu, 3 Oct 2024 10:27:22 -0700 Subject: [PATCH 003/182] PYTHON-4805 Migrate test_connections_survive_primary_stepdown_spec.py to async (#1889) --- test/asynchronous/helpers.py | 11 ++ ...nnections_survive_primary_stepdown_spec.py | 148 ++++++++++++++++++ test/helpers.py | 11 ++ ...nnections_survive_primary_stepdown_spec.py | 10 +- test/utils.py | 48 ++++-- tools/synchro.py | 3 + 6 files changed, 217 insertions(+), 14 deletions(-) create mode 100644 test/asynchronous/test_connections_survive_primary_stepdown_spec.py diff --git a/test/asynchronous/helpers.py b/test/asynchronous/helpers.py index 46f66af62d..b5fc5d8ac4 100644 --- a/test/asynchronous/helpers.py +++ b/test/asynchronous/helpers.py @@ -42,6 +42,7 @@ from bson.son import SON from pymongo import common, message +from pymongo.read_preferences import ReadPreference from pymongo.ssl_support import HAVE_SSL, _ssl # type:ignore[attr-defined] from pymongo.uri_parser import parse_uri @@ -150,6 +151,16 @@ def _create_user(authdb, user, pwd=None, roles=None, **kwargs): return authdb.command(cmd) +async def async_repl_set_step_down(client, **kwargs): + """Run replSetStepDown, first unfreezing a secondary with replSetFreeze.""" + cmd = SON([("replSetStepDown", 1)]) + cmd.update(kwargs) + + # Unfreeze a secondary to ensure a speedy election. + await client.admin.command("replSetFreeze", 0, read_preference=ReadPreference.SECONDARY) + await client.admin.command(cmd) + + class client_knobs: def __init__( self, diff --git a/test/asynchronous/test_connections_survive_primary_stepdown_spec.py b/test/asynchronous/test_connections_survive_primary_stepdown_spec.py new file mode 100644 index 0000000000..289cf49751 --- /dev/null +++ b/test/asynchronous/test_connections_survive_primary_stepdown_spec.py @@ -0,0 +1,148 @@ +# Copyright 2019-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Test compliance with the connections survive primary step down spec.""" +from __future__ import annotations + +import sys + +sys.path[0:0] = [""] + +from test.asynchronous import AsyncIntegrationTest, async_client_context, unittest +from test.asynchronous.helpers import async_repl_set_step_down +from test.utils import ( + CMAPListener, + async_ensure_all_connected, +) + +from bson import SON +from pymongo import monitoring +from pymongo.asynchronous.collection import AsyncCollection +from pymongo.errors import NotPrimaryError +from pymongo.write_concern import WriteConcern + +_IS_SYNC = False + + +class TestAsyncConnectionsSurvivePrimaryStepDown(AsyncIntegrationTest): + listener: CMAPListener + coll: AsyncCollection + + @classmethod + @async_client_context.require_replica_set + async def _setup_class(cls): + await super()._setup_class() + cls.listener = CMAPListener() + cls.client = await cls.unmanaged_async_rs_or_single_client( + event_listeners=[cls.listener], retryWrites=False, heartbeatFrequencyMS=500 + ) + + # Ensure connections to all servers in replica set. This is to test + # that the is_writable flag is properly updated for connections that + # survive a replica set election. + await async_ensure_all_connected(cls.client) + cls.listener.reset() + + cls.db = cls.client.get_database("step-down", write_concern=WriteConcern("majority")) + cls.coll = cls.db.get_collection("step-down", write_concern=WriteConcern("majority")) + + @classmethod + async def _tearDown_class(cls): + await cls.client.close() + + async def asyncSetUp(self): + # Note that all ops use same write-concern as self.db (majority). + await self.db.drop_collection("step-down") + await self.db.create_collection("step-down") + self.listener.reset() + + async def set_fail_point(self, command_args): + cmd = SON([("configureFailPoint", "failCommand")]) + cmd.update(command_args) + await self.client.admin.command(cmd) + + def verify_pool_cleared(self): + self.assertEqual(self.listener.event_count(monitoring.PoolClearedEvent), 1) + + def verify_pool_not_cleared(self): + self.assertEqual(self.listener.event_count(monitoring.PoolClearedEvent), 0) + + @async_client_context.require_version_min(4, 2, -1) + async def test_get_more_iteration(self): + # Insert 5 documents with WC majority. + await self.coll.insert_many([{"data": k} for k in range(5)]) + # Start a find operation and retrieve first batch of results. + batch_size = 2 + cursor = self.coll.find(batch_size=batch_size) + for _ in range(batch_size): + await cursor.next() + # Force step-down the primary. + await async_repl_set_step_down(self.client, replSetStepDown=5, force=True) + # Get await anext batch of results. + for _ in range(batch_size): + await cursor.next() + # Verify pool not cleared. + self.verify_pool_not_cleared() + # Attempt insertion to mark server description as stale and prevent a + # NotPrimaryError on the subsequent operation. + try: + await self.coll.insert_one({}) + except NotPrimaryError: + pass + # Next insert should succeed on the new primary without clearing pool. + await self.coll.insert_one({}) + self.verify_pool_not_cleared() + + async def run_scenario(self, error_code, retry, pool_status_checker): + # Set fail point. + await self.set_fail_point( + {"mode": {"times": 1}, "data": {"failCommands": ["insert"], "errorCode": error_code}} + ) + self.addAsyncCleanup(self.set_fail_point, {"mode": "off"}) + # Insert record and verify failure. 
+ with self.assertRaises(NotPrimaryError) as exc: + await self.coll.insert_one({"test": 1}) + self.assertEqual(exc.exception.details["code"], error_code) # type: ignore[call-overload] + # Retry before CMAPListener assertion if retry_before=True. + if retry: + await self.coll.insert_one({"test": 1}) + # Verify pool cleared/not cleared. + pool_status_checker() + # Always retry here to ensure discovery of new primary. + await self.coll.insert_one({"test": 1}) + + @async_client_context.require_version_min(4, 2, -1) + @async_client_context.require_test_commands + async def test_not_primary_keep_connection_pool(self): + await self.run_scenario(10107, True, self.verify_pool_not_cleared) + + @async_client_context.require_version_min(4, 0, 0) + @async_client_context.require_version_max(4, 1, 0, -1) + @async_client_context.require_test_commands + async def test_not_primary_reset_connection_pool(self): + await self.run_scenario(10107, False, self.verify_pool_cleared) + + @async_client_context.require_version_min(4, 0, 0) + @async_client_context.require_test_commands + async def test_shutdown_in_progress(self): + await self.run_scenario(91, False, self.verify_pool_cleared) + + @async_client_context.require_version_min(4, 0, 0) + @async_client_context.require_test_commands + async def test_interrupted_at_shutdown(self): + await self.run_scenario(11600, False, self.verify_pool_cleared) + + +if __name__ == "__main__": + unittest.main() diff --git a/test/helpers.py b/test/helpers.py index bf6186d1a0..11d5ab0374 100644 --- a/test/helpers.py +++ b/test/helpers.py @@ -42,6 +42,7 @@ from bson.son import SON from pymongo import common, message +from pymongo.read_preferences import ReadPreference from pymongo.ssl_support import HAVE_SSL, _ssl # type:ignore[attr-defined] from pymongo.uri_parser import parse_uri @@ -150,6 +151,16 @@ def _create_user(authdb, user, pwd=None, roles=None, **kwargs): return authdb.command(cmd) +def repl_set_step_down(client, **kwargs): + """Run replSetStepDown, first unfreezing a secondary with replSetFreeze.""" + cmd = SON([("replSetStepDown", 1)]) + cmd.update(kwargs) + + # Unfreeze a secondary to ensure a speedy election. 
+ client.admin.command("replSetFreeze", 0, read_preference=ReadPreference.SECONDARY) + client.admin.command(cmd) + + class client_knobs: def __init__( self, diff --git a/test/test_connections_survive_primary_stepdown_spec.py b/test/test_connections_survive_primary_stepdown_spec.py index fba7675743..54cc4e0482 100644 --- a/test/test_connections_survive_primary_stepdown_spec.py +++ b/test/test_connections_survive_primary_stepdown_spec.py @@ -20,10 +20,10 @@ sys.path[0:0] = [""] from test import IntegrationTest, client_context, unittest +from test.helpers import repl_set_step_down from test.utils import ( CMAPListener, ensure_all_connected, - repl_set_step_down, ) from bson import SON @@ -32,6 +32,8 @@ from pymongo.synchronous.collection import Collection from pymongo.write_concern import WriteConcern +_IS_SYNC = True + class TestConnectionsSurvivePrimaryStepDown(IntegrationTest): listener: CMAPListener @@ -39,8 +41,8 @@ class TestConnectionsSurvivePrimaryStepDown(IntegrationTest): @classmethod @client_context.require_replica_set - def setUpClass(cls): - super().setUpClass() + def _setup_class(cls): + super()._setup_class() cls.listener = CMAPListener() cls.client = cls.unmanaged_rs_or_single_client( event_listeners=[cls.listener], retryWrites=False, heartbeatFrequencyMS=500 @@ -56,7 +58,7 @@ def setUpClass(cls): cls.coll = cls.db.get_collection("step-down", write_concern=WriteConcern("majority")) @classmethod - def tearDownClass(cls): + def _tearDown_class(cls): cls.client.close() def setUp(self): diff --git a/test/utils.py b/test/utils.py index 9615034899..9c78cff3ad 100644 --- a/test/utils.py +++ b/test/utils.py @@ -599,6 +599,44 @@ def discover(): ) +async def async_ensure_all_connected(client: AsyncMongoClient) -> None: + """Ensure that the client's connection pool has socket connections to all + members of a replica set. Raises ConfigurationError when called with a + non-replica set client. + + Depending on the use-case, the caller may need to clear any event listeners + that are configured on the client. + """ + hello: dict = await client.admin.command(HelloCompat.LEGACY_CMD) + if "setName" not in hello: + raise ConfigurationError("cluster is not a replica set") + + target_host_list = set(hello["hosts"] + hello.get("passives", [])) + connected_host_list = {hello["me"]} + + # Run hello until we have connected to each host at least once. + async def discover(): + i = 0 + while i < 100 and connected_host_list != target_host_list: + hello: dict = await client.admin.command( + HelloCompat.LEGACY_CMD, read_preference=ReadPreference.SECONDARY + ) + connected_host_list.update([hello["me"]]) + i += 1 + return connected_host_list + + try: + + async def predicate(): + return target_host_list == await discover() + + await async_wait_until(predicate, "connected to all hosts") + except AssertionError as exc: + raise AssertionError( + f"{exc}, {connected_host_list} != {target_host_list}, {client.topology_description}" + ) + + def one(s): """Get one element of a set""" return next(iter(s)) @@ -761,16 +799,6 @@ async def async_wait_until(predicate, success_description, timeout=10): await asyncio.sleep(interval) -def repl_set_step_down(client, **kwargs): - """Run replSetStepDown, first unfreezing a secondary with replSetFreeze.""" - cmd = SON([("replSetStepDown", 1)]) - cmd.update(kwargs) - - # Unfreeze a secondary to ensure a speedy election. 
- client.admin.command("replSetFreeze", 0, read_preference=ReadPreference.SECONDARY) - client.admin.command(cmd) - - def is_mongos(client): res = client.admin.command(HelloCompat.LEGACY_CMD) return res.get("msg", "") == "isdbgrid" diff --git a/tools/synchro.py b/tools/synchro.py index 3333b0de2e..d8ec9ae46f 100644 --- a/tools/synchro.py +++ b/tools/synchro.py @@ -105,6 +105,8 @@ "AsyncTestGridFile": "TestGridFile", "AsyncTestGridFileNoConnect": "TestGridFileNoConnect", "async_set_fail_point": "set_fail_point", + "async_ensure_all_connected": "ensure_all_connected", + "async_repl_set_step_down": "repl_set_step_down", } docstring_replacements: dict[tuple[str, str], str] = { @@ -186,6 +188,7 @@ def async_only_test(f: str) -> bool: "test_client_bulk_write.py", "test_client_context.py", "test_collection.py", + "test_connections_survive_primary_stepdown_spec.py", "test_cursor.py", "test_database.py", "test_encryption.py", From 7380097dbca42580f9547bbd632f1efe96afc460 Mon Sep 17 00:00:00 2001 From: "Jeffrey A. Clark" Date: Thu, 3 Oct 2024 13:39:04 -0400 Subject: [PATCH 004/182] PYTHON-3959 - NULL Initialize PyObjects (#1859) --- bson/_cbsonmodule.c | 24 ++++++++++++------------ pymongo/_cmessagemodule.c | 34 +++++++++++++++++----------------- 2 files changed, 29 insertions(+), 29 deletions(-) diff --git a/bson/_cbsonmodule.c b/bson/_cbsonmodule.c index 34b407b940..223c392280 100644 --- a/bson/_cbsonmodule.c +++ b/bson/_cbsonmodule.c @@ -207,7 +207,7 @@ static PyObject* _test_long_long_to_str(PyObject* self, PyObject* args) { * * Returns a new ref */ static PyObject* _error(char* name) { - PyObject* error; + PyObject* error = NULL; PyObject* errors = PyImport_ImportModule("bson.errors"); if (!errors) { return NULL; @@ -279,7 +279,7 @@ static PyObject* datetime_from_millis(long long millis) { * micros = diff * 1000 111000 * Resulting in datetime(1, 1, 1, 1, 1, 1, 111000) -- the expected result */ - PyObject* datetime; + PyObject* datetime = NULL; int diff = (int)(((millis % 1000) + 1000) % 1000); int microseconds = diff * 1000; Time64_T seconds = (millis - diff) / 1000; @@ -294,7 +294,7 @@ static PyObject* datetime_from_millis(long long millis) { timeinfo.tm_sec, microseconds); if(!datetime) { - PyObject *etype, *evalue, *etrace; + PyObject *etype = NULL, *evalue = NULL, *etrace = NULL; /* * Calling _error clears the error state, so fetch it first. @@ -350,8 +350,8 @@ static PyObject* datetime_ms_from_millis(PyObject* self, long long millis){ return NULL; } - PyObject* dt; - PyObject* ll_millis; + PyObject* dt = NULL; + PyObject* ll_millis = NULL; if (!(ll_millis = PyLong_FromLongLong(millis))){ return NULL; @@ -1790,7 +1790,7 @@ static PyObject* _cbson_dict_to_bson(PyObject* self, PyObject* args) { PyObject* result; unsigned char check_keys; unsigned char top_level = 1; - PyObject* options_obj; + PyObject* options_obj = NULL; codec_options_t options; buffer_t buffer; PyObject* raw_bson_document_bytes_obj; @@ -2512,8 +2512,8 @@ static PyObject* get_value(PyObject* self, PyObject* name, const char* buffer, * Wrap any non-InvalidBSON errors in InvalidBSON. */ if (PyErr_Occurred()) { - PyObject *etype, *evalue, *etrace; - PyObject *InvalidBSON; + PyObject *etype = NULL, *evalue = NULL, *etrace = NULL; + PyObject *InvalidBSON = NULL; /* * Calling _error clears the error state, so fetch it first. 
@@ -2585,8 +2585,8 @@ static int _element_to_dict(PyObject* self, const char* string, if (!*name) { /* If NULL is returned then wrap the UnicodeDecodeError in an InvalidBSON error */ - PyObject *etype, *evalue, *etrace; - PyObject *InvalidBSON; + PyObject *etype = NULL, *evalue = NULL, *etrace = NULL; + PyObject *InvalidBSON = NULL; PyErr_Fetch(&etype, &evalue, &etrace); if (PyErr_GivenExceptionMatches(etype, PyExc_Exception)) { @@ -2620,7 +2620,7 @@ static PyObject* _cbson_element_to_dict(PyObject* self, PyObject* args) { /* TODO: Support buffer protocol */ char* string; PyObject* bson; - PyObject* options_obj; + PyObject* options_obj = NULL; codec_options_t options; unsigned position; unsigned max; @@ -2732,7 +2732,7 @@ static PyObject* _cbson_bson_to_dict(PyObject* self, PyObject* args) { int32_t size; Py_ssize_t total_size; const char* string; - PyObject* bson; + PyObject* bson = NULL; codec_options_t options; PyObject* result = NULL; PyObject* options_obj; diff --git a/pymongo/_cmessagemodule.c b/pymongo/_cmessagemodule.c index f95b949380..b5adbeec32 100644 --- a/pymongo/_cmessagemodule.c +++ b/pymongo/_cmessagemodule.c @@ -45,7 +45,7 @@ struct module_state { * * Returns a new ref */ static PyObject* _error(char* name) { - PyObject* error; + PyObject* error = NULL; PyObject* errors = PyImport_ImportModule("pymongo.errors"); if (!errors) { return NULL; @@ -75,9 +75,9 @@ static PyObject* _cbson_query_message(PyObject* self, PyObject* args) { int begin, cur_size, max_size = 0; int num_to_skip; int num_to_return; - PyObject* query; - PyObject* field_selector; - PyObject* options_obj; + PyObject* query = NULL; + PyObject* field_selector = NULL; + PyObject* options_obj = NULL; codec_options_t options; buffer_t buffer = NULL; int length_location, message_length; @@ -221,12 +221,12 @@ static PyObject* _cbson_op_msg(PyObject* self, PyObject* args) { /* NOTE just using a random number as the request_id */ int request_id = rand(); unsigned int flags; - PyObject* command; + PyObject* command = NULL; char* identifier = NULL; Py_ssize_t identifier_length = 0; - PyObject* docs; - PyObject* doc; - PyObject* options_obj; + PyObject* docs = NULL; + PyObject* doc = NULL; + PyObject* options_obj = NULL; codec_options_t options; buffer_t buffer = NULL; int length_location, message_length; @@ -535,12 +535,12 @@ static PyObject* _cbson_encode_batched_op_msg(PyObject* self, PyObject* args) { unsigned char op; unsigned char ack; - PyObject* command; - PyObject* docs; + PyObject* command = NULL; + PyObject* docs = NULL; PyObject* ctx = NULL; PyObject* to_publish = NULL; PyObject* result = NULL; - PyObject* options_obj; + PyObject* options_obj = NULL; codec_options_t options; buffer_t buffer; struct module_state *state = GETSTATE(self); @@ -592,12 +592,12 @@ _cbson_batched_op_msg(PyObject* self, PyObject* args) { unsigned char ack; int request_id; int position; - PyObject* command; - PyObject* docs; + PyObject* command = NULL; + PyObject* docs = NULL; PyObject* ctx = NULL; PyObject* to_publish = NULL; PyObject* result = NULL; - PyObject* options_obj; + PyObject* options_obj = NULL; codec_options_t options; buffer_t buffer; struct module_state *state = GETSTATE(self); @@ -868,12 +868,12 @@ _cbson_encode_batched_write_command(PyObject* self, PyObject* args) { char *ns = NULL; unsigned char op; Py_ssize_t ns_len; - PyObject* command; - PyObject* docs; + PyObject* command = NULL; + PyObject* docs = NULL; PyObject* ctx = NULL; PyObject* to_publish = NULL; PyObject* result = NULL; - PyObject* options_obj; + PyObject* 
options_obj = NULL; codec_options_t options; buffer_t buffer; struct module_state *state = GETSTATE(self); From b111cbf5d5dab906a94d2c4b2a209cfde2971a94 Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Thu, 3 Oct 2024 15:18:33 -0400 Subject: [PATCH 005/182] PYTHON-4636 - Avoid blocking I/O calls in async code paths (#1870) Co-authored-by: Shane Harvey --- pymongo/asynchronous/network.py | 81 +---------- pymongo/network_layer.py | 230 +++++++++++++++++++++++++++++-- pymongo/pyopenssl_context.py | 13 +- pymongo/synchronous/network.py | 77 +---------- test/asynchronous/test_client.py | 6 +- test/test_client.py | 6 +- tools/synchro.py | 1 + 7 files changed, 248 insertions(+), 166 deletions(-) diff --git a/pymongo/asynchronous/network.py b/pymongo/asynchronous/network.py index 44a63a2fc3..d17aead120 100644 --- a/pymongo/asynchronous/network.py +++ b/pymongo/asynchronous/network.py @@ -15,11 +15,8 @@ """Internal network layer helper methods.""" from __future__ import annotations -import asyncio import datetime -import errno import logging -import socket import time from typing import ( TYPE_CHECKING, @@ -40,19 +37,16 @@ NotPrimaryError, OperationFailure, ProtocolError, - _OperationCancelled, ) from pymongo.logger import _COMMAND_LOGGER, _CommandStatusMessage, _debug_log from pymongo.message import _UNPACK_REPLY, _OpMsg, _OpReply from pymongo.monitoring import _is_speculative_authenticate from pymongo.network_layer import ( - _POLL_TIMEOUT, _UNPACK_COMPRESSION_HEADER, _UNPACK_HEADER, - BLOCKING_IO_ERRORS, + async_receive_data, async_sendall, ) -from pymongo.socket_checker import _errno_from_exception if TYPE_CHECKING: from bson import CodecOptions @@ -318,9 +312,7 @@ async def receive_message( else: deadline = None # Ignore the response's request id. - length, _, response_to, op_code = _UNPACK_HEADER( - await _receive_data_on_socket(conn, 16, deadline) - ) + length, _, response_to, op_code = _UNPACK_HEADER(await async_receive_data(conn, 16, deadline)) # No request_id for exhaust cursor "getMore". if request_id is not None: if request_id != response_to: @@ -336,11 +328,11 @@ async def receive_message( ) if op_code == 2012: op_code, _, compressor_id = _UNPACK_COMPRESSION_HEADER( - await _receive_data_on_socket(conn, 9, deadline) + await async_receive_data(conn, 9, deadline) ) - data = decompress(await _receive_data_on_socket(conn, length - 25, deadline), compressor_id) + data = decompress(await async_receive_data(conn, length - 25, deadline), compressor_id) else: - data = await _receive_data_on_socket(conn, length - 16, deadline) + data = await async_receive_data(conn, length - 16, deadline) try: unpack_reply = _UNPACK_REPLY[op_code] @@ -349,66 +341,3 @@ async def receive_message( f"Got opcode {op_code!r} but expected {_UNPACK_REPLY.keys()!r}" ) from None return unpack_reply(data) - - -async def wait_for_read(conn: AsyncConnection, deadline: Optional[float]) -> None: - """Block until at least one byte is read, or a timeout, or a cancel.""" - sock = conn.conn - timed_out = False - # Check if the connection's socket has been manually closed - if sock.fileno() == -1: - return - while True: - # SSLSocket can have buffered data which won't be caught by select. - if hasattr(sock, "pending") and sock.pending() > 0: - readable = True - else: - # Wait up to 500ms for the socket to become readable and then - # check for cancellation. - if deadline: - remaining = deadline - time.monotonic() - # When the timeout has expired perform one final check to - # see if the socket is readable. 
This helps avoid spurious - # timeouts on AWS Lambda and other FaaS environments. - if remaining <= 0: - timed_out = True - timeout = max(min(remaining, _POLL_TIMEOUT), 0) - else: - timeout = _POLL_TIMEOUT - readable = conn.socket_checker.select(sock, read=True, timeout=timeout) - if conn.cancel_context.cancelled: - raise _OperationCancelled("operation cancelled") - if readable: - return - if timed_out: - raise socket.timeout("timed out") - await asyncio.sleep(0) - - -async def _receive_data_on_socket( - conn: AsyncConnection, length: int, deadline: Optional[float] -) -> memoryview: - buf = bytearray(length) - mv = memoryview(buf) - bytes_read = 0 - while bytes_read < length: - try: - await wait_for_read(conn, deadline) - # CSOT: Update timeout. When the timeout has expired perform one - # final non-blocking recv. This helps avoid spurious timeouts when - # the response is actually already buffered on the client. - if _csot.get_timeout() and deadline is not None: - conn.set_conn_timeout(max(deadline - time.monotonic(), 0)) - chunk_length = conn.conn.recv_into(mv[bytes_read:]) - except BLOCKING_IO_ERRORS: - raise socket.timeout("timed out") from None - except OSError as exc: - if _errno_from_exception(exc) == errno.EINTR: - continue - raise - if chunk_length == 0: - raise OSError("connection closed") - - bytes_read += chunk_length - - return mv diff --git a/pymongo/network_layer.py b/pymongo/network_layer.py index 82a6228acc..4b57620d83 100644 --- a/pymongo/network_layer.py +++ b/pymongo/network_layer.py @@ -16,15 +16,21 @@ from __future__ import annotations import asyncio +import errno import socket import struct import sys +import time from asyncio import AbstractEventLoop, Future from typing import ( + TYPE_CHECKING, + Optional, Union, ) -from pymongo import ssl_support +from pymongo import _csot, ssl_support +from pymongo.errors import _OperationCancelled +from pymongo.socket_checker import _errno_from_exception try: from ssl import SSLError, SSLSocket @@ -51,6 +57,10 @@ BLOCKING_IO_WRITE_ERROR, ) +if TYPE_CHECKING: + from pymongo.asynchronous.pool import AsyncConnection + from pymongo.synchronous.pool import Connection + _UNPACK_HEADER = struct.Struct(" None: view = memoryview(buf) - fd = sock.fileno() sent = 0 def _is_ready(fut: Future) -> None: - loop.remove_writer(fd) - loop.remove_reader(fd) if fut.done(): return fut.set_result(None) @@ -101,33 +108,240 @@ def _is_ready(fut: Future) -> None: if isinstance(exc, BLOCKING_IO_READ_ERROR): fut = loop.create_future() loop.add_reader(fd, _is_ready, fut) - await fut + try: + await fut + finally: + loop.remove_reader(fd) if isinstance(exc, BLOCKING_IO_WRITE_ERROR): fut = loop.create_future() loop.add_writer(fd, _is_ready, fut) - await fut + try: + await fut + finally: + loop.remove_writer(fd) if _HAVE_PYOPENSSL and isinstance(exc, BLOCKING_IO_LOOKUP_ERROR): fut = loop.create_future() loop.add_reader(fd, _is_ready, fut) + try: + loop.add_writer(fd, _is_ready, fut) + await fut + finally: + loop.remove_reader(fd) + loop.remove_writer(fd) + + async def _async_receive_ssl( + conn: _sslConn, length: int, loop: AbstractEventLoop + ) -> memoryview: + mv = memoryview(bytearray(length)) + total_read = 0 + + def _is_ready(fut: Future) -> None: + if fut.done(): + return + fut.set_result(None) + + while total_read < length: + try: + read = conn.recv_into(mv[total_read:]) + if read == 0: + raise OSError("connection closed") + total_read += read + except BLOCKING_IO_ERRORS as exc: + fd = conn.fileno() + # Check for closed socket. 
+ if fd == -1: + raise SSLError("Underlying socket has been closed") from None + if isinstance(exc, BLOCKING_IO_READ_ERROR): + fut = loop.create_future() + loop.add_reader(fd, _is_ready, fut) + try: + await fut + finally: + loop.remove_reader(fd) + if isinstance(exc, BLOCKING_IO_WRITE_ERROR): + fut = loop.create_future() loop.add_writer(fd, _is_ready, fut) - await fut + try: + await fut + finally: + loop.remove_writer(fd) + if _HAVE_PYOPENSSL and isinstance(exc, BLOCKING_IO_LOOKUP_ERROR): + fut = loop.create_future() + loop.add_reader(fd, _is_ready, fut) + try: + loop.add_writer(fd, _is_ready, fut) + await fut + finally: + loop.remove_reader(fd) + loop.remove_writer(fd) + return mv + else: # The default Windows asyncio event loop does not support loop.add_reader/add_writer: # https://docs.python.org/3/library/asyncio-platforms.html#asyncio-platform-support + # Note: In PYTHON-4493 we plan to replace this code with asyncio streams. async def _async_sendall_ssl( sock: Union[socket.socket, _sslConn], buf: bytes, dummy: AbstractEventLoop ) -> None: view = memoryview(buf) total_length = len(buf) total_sent = 0 + # Backoff starts at 1ms, doubles on timeout up to 512ms, and halves on success + # down to 1ms. + backoff = 0.001 while total_sent < total_length: try: sent = sock.send(view[total_sent:]) except BLOCKING_IO_ERRORS: - await asyncio.sleep(0.5) + await asyncio.sleep(backoff) sent = 0 + if sent > 0: + backoff = max(backoff / 2, 0.001) + else: + backoff = min(backoff * 2, 0.512) total_sent += sent + async def _async_receive_ssl( + conn: _sslConn, length: int, dummy: AbstractEventLoop + ) -> memoryview: + mv = memoryview(bytearray(length)) + total_read = 0 + # Backoff starts at 1ms, doubles on timeout up to 512ms, and halves on success + # down to 1ms. + backoff = 0.001 + while total_read < length: + try: + read = conn.recv_into(mv[total_read:]) + if read == 0: + raise OSError("connection closed") + except BLOCKING_IO_ERRORS: + await asyncio.sleep(backoff) + read = 0 + if read > 0: + backoff = max(backoff / 2, 0.001) + else: + backoff = min(backoff * 2, 0.512) + total_read += read + return mv + def sendall(sock: Union[socket.socket, _sslConn], buf: bytes) -> None: sock.sendall(buf) + + +async def _poll_cancellation(conn: AsyncConnection) -> None: + while True: + if conn.cancel_context.cancelled: + return + + await asyncio.sleep(_POLL_TIMEOUT) + + +async def async_receive_data( + conn: AsyncConnection, length: int, deadline: Optional[float] +) -> memoryview: + sock = conn.conn + sock_timeout = sock.gettimeout() + timeout: Optional[Union[float, int]] + if deadline: + # When the timeout has expired perform one final check to + # see if the socket is readable. This helps avoid spurious + # timeouts on AWS Lambda and other FaaS environments. 
+ timeout = max(deadline - time.monotonic(), 0) + else: + timeout = sock_timeout + + sock.settimeout(0.0) + loop = asyncio.get_event_loop() + cancellation_task = asyncio.create_task(_poll_cancellation(conn)) + try: + if _HAVE_SSL and isinstance(sock, (SSLSocket, _sslConn)): + read_task = asyncio.create_task(_async_receive_ssl(sock, length, loop)) # type: ignore[arg-type] + else: + read_task = asyncio.create_task(_async_receive(sock, length, loop)) # type: ignore[arg-type] + tasks = [read_task, cancellation_task] + done, pending = await asyncio.wait( + tasks, timeout=timeout, return_when=asyncio.FIRST_COMPLETED + ) + for task in pending: + task.cancel() + await asyncio.wait(pending) + if len(done) == 0: + raise socket.timeout("timed out") + if read_task in done: + return read_task.result() + raise _OperationCancelled("operation cancelled") + finally: + sock.settimeout(sock_timeout) + + +async def _async_receive(conn: socket.socket, length: int, loop: AbstractEventLoop) -> memoryview: + mv = memoryview(bytearray(length)) + bytes_read = 0 + while bytes_read < length: + chunk_length = await loop.sock_recv_into(conn, mv[bytes_read:]) + if chunk_length == 0: + raise OSError("connection closed") + bytes_read += chunk_length + return mv + + +# Sync version: +def wait_for_read(conn: Connection, deadline: Optional[float]) -> None: + """Block until at least one byte is read, or a timeout, or a cancel.""" + sock = conn.conn + timed_out = False + # Check if the connection's socket has been manually closed + if sock.fileno() == -1: + return + while True: + # SSLSocket can have buffered data which won't be caught by select. + if hasattr(sock, "pending") and sock.pending() > 0: + readable = True + else: + # Wait up to 500ms for the socket to become readable and then + # check for cancellation. + if deadline: + remaining = deadline - time.monotonic() + # When the timeout has expired perform one final check to + # see if the socket is readable. This helps avoid spurious + # timeouts on AWS Lambda and other FaaS environments. + if remaining <= 0: + timed_out = True + timeout = max(min(remaining, _POLL_TIMEOUT), 0) + else: + timeout = _POLL_TIMEOUT + readable = conn.socket_checker.select(sock, read=True, timeout=timeout) + if conn.cancel_context.cancelled: + raise _OperationCancelled("operation cancelled") + if readable: + return + if timed_out: + raise socket.timeout("timed out") + + +def receive_data(conn: Connection, length: int, deadline: Optional[float]) -> memoryview: + buf = bytearray(length) + mv = memoryview(buf) + bytes_read = 0 + while bytes_read < length: + try: + wait_for_read(conn, deadline) + # CSOT: Update timeout. When the timeout has expired perform one + # final non-blocking recv. This helps avoid spurious timeouts when + # the response is actually already buffered on the client. 
+ if _csot.get_timeout() and deadline is not None: + conn.set_conn_timeout(max(deadline - time.monotonic(), 0)) + chunk_length = conn.conn.recv_into(mv[bytes_read:]) + except BLOCKING_IO_ERRORS: + raise socket.timeout("timed out") from None + except OSError as exc: + if _errno_from_exception(exc) == errno.EINTR: + continue + raise + if chunk_length == 0: + raise OSError("connection closed") + + bytes_read += chunk_length + + return mv diff --git a/pymongo/pyopenssl_context.py b/pymongo/pyopenssl_context.py index 4f6f6f4a89..50d8680a74 100644 --- a/pymongo/pyopenssl_context.py +++ b/pymongo/pyopenssl_context.py @@ -105,13 +105,19 @@ def _ragged_eof(exc: BaseException) -> bool: # https://docs.python.org/3/library/ssl.html#notes-on-non-blocking-sockets class _sslConn(_SSL.Connection): def __init__( - self, ctx: _SSL.Context, sock: Optional[_socket.socket], suppress_ragged_eofs: bool + self, + ctx: _SSL.Context, + sock: Optional[_socket.socket], + suppress_ragged_eofs: bool, + is_async: bool = False, ): self.socket_checker = _SocketChecker() self.suppress_ragged_eofs = suppress_ragged_eofs super().__init__(ctx, sock) + self._is_async = is_async def _call(self, call: Callable[..., _T], *args: Any, **kwargs: Any) -> _T: + is_async = kwargs.pop("allow_async", True) and self._is_async timeout = self.gettimeout() if timeout: start = _time.monotonic() @@ -119,6 +125,8 @@ def _call(self, call: Callable[..., _T], *args: Any, **kwargs: Any) -> _T: try: return call(*args, **kwargs) except BLOCKING_IO_ERRORS as exc: + if is_async: + raise exc # Check for closed socket. if self.fileno() == -1: if timeout and _time.monotonic() - start > timeout: @@ -139,6 +147,7 @@ def _call(self, call: Callable[..., _T], *args: Any, **kwargs: Any) -> _T: continue def do_handshake(self, *args: Any, **kwargs: Any) -> None: + kwargs["allow_async"] = False return self._call(super().do_handshake, *args, **kwargs) def recv(self, *args: Any, **kwargs: Any) -> bytes: @@ -381,7 +390,7 @@ async def a_wrap_socket( """Wrap an existing Python socket connection and return a TLS socket object. """ - ssl_conn = _sslConn(self._ctx, sock, suppress_ragged_eofs) + ssl_conn = _sslConn(self._ctx, sock, suppress_ragged_eofs, True) loop = asyncio.get_running_loop() if session: ssl_conn.set_session(session) diff --git a/pymongo/synchronous/network.py b/pymongo/synchronous/network.py index c1978087a9..7206dca735 100644 --- a/pymongo/synchronous/network.py +++ b/pymongo/synchronous/network.py @@ -16,9 +16,7 @@ from __future__ import annotations import datetime -import errno import logging -import socket import time from typing import ( TYPE_CHECKING, @@ -39,19 +37,16 @@ NotPrimaryError, OperationFailure, ProtocolError, - _OperationCancelled, ) from pymongo.logger import _COMMAND_LOGGER, _CommandStatusMessage, _debug_log from pymongo.message import _UNPACK_REPLY, _OpMsg, _OpReply from pymongo.monitoring import _is_speculative_authenticate from pymongo.network_layer import ( - _POLL_TIMEOUT, _UNPACK_COMPRESSION_HEADER, _UNPACK_HEADER, - BLOCKING_IO_ERRORS, + receive_data, sendall, ) -from pymongo.socket_checker import _errno_from_exception if TYPE_CHECKING: from bson import CodecOptions @@ -317,7 +312,7 @@ def receive_message( else: deadline = None # Ignore the response's request id. - length, _, response_to, op_code = _UNPACK_HEADER(_receive_data_on_socket(conn, 16, deadline)) + length, _, response_to, op_code = _UNPACK_HEADER(receive_data(conn, 16, deadline)) # No request_id for exhaust cursor "getMore". 
if request_id is not None: if request_id != response_to: @@ -332,12 +327,10 @@ def receive_message( f"message size ({max_message_size!r})" ) if op_code == 2012: - op_code, _, compressor_id = _UNPACK_COMPRESSION_HEADER( - _receive_data_on_socket(conn, 9, deadline) - ) - data = decompress(_receive_data_on_socket(conn, length - 25, deadline), compressor_id) + op_code, _, compressor_id = _UNPACK_COMPRESSION_HEADER(receive_data(conn, 9, deadline)) + data = decompress(receive_data(conn, length - 25, deadline), compressor_id) else: - data = _receive_data_on_socket(conn, length - 16, deadline) + data = receive_data(conn, length - 16, deadline) try: unpack_reply = _UNPACK_REPLY[op_code] @@ -346,63 +339,3 @@ def receive_message( f"Got opcode {op_code!r} but expected {_UNPACK_REPLY.keys()!r}" ) from None return unpack_reply(data) - - -def wait_for_read(conn: Connection, deadline: Optional[float]) -> None: - """Block until at least one byte is read, or a timeout, or a cancel.""" - sock = conn.conn - timed_out = False - # Check if the connection's socket has been manually closed - if sock.fileno() == -1: - return - while True: - # SSLSocket can have buffered data which won't be caught by select. - if hasattr(sock, "pending") and sock.pending() > 0: - readable = True - else: - # Wait up to 500ms for the socket to become readable and then - # check for cancellation. - if deadline: - remaining = deadline - time.monotonic() - # When the timeout has expired perform one final check to - # see if the socket is readable. This helps avoid spurious - # timeouts on AWS Lambda and other FaaS environments. - if remaining <= 0: - timed_out = True - timeout = max(min(remaining, _POLL_TIMEOUT), 0) - else: - timeout = _POLL_TIMEOUT - readable = conn.socket_checker.select(sock, read=True, timeout=timeout) - if conn.cancel_context.cancelled: - raise _OperationCancelled("operation cancelled") - if readable: - return - if timed_out: - raise socket.timeout("timed out") - - -def _receive_data_on_socket(conn: Connection, length: int, deadline: Optional[float]) -> memoryview: - buf = bytearray(length) - mv = memoryview(buf) - bytes_read = 0 - while bytes_read < length: - try: - wait_for_read(conn, deadline) - # CSOT: Update timeout. When the timeout has expired perform one - # final non-blocking recv. This helps avoid spurious timeouts when - # the response is actually already buffered on the client. 
- if _csot.get_timeout() and deadline is not None: - conn.set_conn_timeout(max(deadline - time.monotonic(), 0)) - chunk_length = conn.conn.recv_into(mv[bytes_read:]) - except BLOCKING_IO_ERRORS: - raise socket.timeout("timed out") from None - except OSError as exc: - if _errno_from_exception(exc) == errno.EINTR: - continue - raise - if chunk_length == 0: - raise OSError("connection closed") - - bytes_read += chunk_length - - return mv diff --git a/test/asynchronous/test_client.py b/test/asynchronous/test_client.py index 5c06331790..2052d1cd7f 100644 --- a/test/asynchronous/test_client.py +++ b/test/asynchronous/test_client.py @@ -1713,6 +1713,7 @@ def compression_settings(client): # No error await client.pymongo_test.test.find_one() + @async_client_context.require_sync async def test_reset_during_update_pool(self): client = await self.async_rs_or_single_client(minPoolSize=10) await client.admin.command("ping") @@ -1737,10 +1738,7 @@ async def _run(self): await asyncio.sleep(0.001) def run(self): - if _IS_SYNC: - self._run() - else: - asyncio.run(self._run()) + self._run() t = ResetPoolThread(pool) t.start() diff --git a/test/test_client.py b/test/test_client.py index c88a8fd9b4..936c38b8c6 100644 --- a/test/test_client.py +++ b/test/test_client.py @@ -1671,6 +1671,7 @@ def compression_settings(client): # No error client.pymongo_test.test.find_one() + @client_context.require_sync def test_reset_during_update_pool(self): client = self.rs_or_single_client(minPoolSize=10) client.admin.command("ping") @@ -1695,10 +1696,7 @@ def _run(self): time.sleep(0.001) def run(self): - if _IS_SYNC: - self._run() - else: - asyncio.run(self._run()) + self._run() t = ResetPoolThread(pool) t.start() diff --git a/tools/synchro.py b/tools/synchro.py index d8ec9ae46f..585fc5fefd 100644 --- a/tools/synchro.py +++ b/tools/synchro.py @@ -43,6 +43,7 @@ "AsyncConnection": "Connection", "async_command": "command", "async_receive_message": "receive_message", + "async_receive_data": "receive_data", "async_sendall": "sendall", "asynchronous": "synchronous", "Asynchronous": "Synchronous", From 68127d5efd3580ad718a437eff91230c0f70e20e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 7 Oct 2024 04:49:40 -0500 Subject: [PATCH 006/182] Bump the actions group with 2 updates (#1897) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/codeql.yml | 2 +- .github/workflows/test-python.yml | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 370b8759e6..2dc070d7c6 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -39,7 +39,7 @@ jobs: uses: actions/checkout@v4 with: ref: ${{ inputs.ref }} - - uses: actions/setup-python@v3 + - uses: actions/setup-python@v5 # Initializes the CodeQL tools for scanning. 
- name: Initialize CodeQL diff --git a/.github/workflows/test-python.yml b/.github/workflows/test-python.yml index 921168c130..3ecdfa52f3 100644 --- a/.github/workflows/test-python.yml +++ b/.github/workflows/test-python.yml @@ -72,7 +72,7 @@ jobs: pip install hatch fi - name: Start MongoDB - uses: supercharge/mongodb-github-action@1.10.0 + uses: supercharge/mongodb-github-action@1.11.0 with: mongodb-version: 6.0 - name: Run tests @@ -94,7 +94,7 @@ jobs: run: | pip install -U hatch pip - name: Start MongoDB - uses: supercharge/mongodb-github-action@1.10.0 + uses: supercharge/mongodb-github-action@1.11.0 with: mongodb-version: '8.0.0-rc4' - name: Run tests @@ -201,7 +201,7 @@ jobs: # Test sdist on lowest supported Python python-version: '3.8' - name: Start MongoDB - uses: supercharge/mongodb-github-action@1.10.0 + uses: supercharge/mongodb-github-action@1.11.0 - name: Run connect test from sdist shell: bash run: | From def3c11787530290c073080eaceb3682d578b73d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 7 Oct 2024 04:50:49 -0500 Subject: [PATCH 007/182] Bump furo from 2023.9.10 to 2024.8.6 (#1898) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/docs.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/docs.txt b/requirements/docs.txt index 16b2746866..d3f0c73034 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -3,4 +3,4 @@ sphinx_rtd_theme>=2,<3 readthedocs-sphinx-search~=0.3 sphinxcontrib-shellcheck>=1,<2 sphinx-autobuild>=2020.9.1 -furo==2023.9.10 +furo==2024.8.6 From 093d5bebde9c4a12ca05edd0c41c350dba472f67 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 7 Oct 2024 04:51:09 -0500 Subject: [PATCH 008/182] Bump pyright from 1.1.382.post1 to 1.1.383 (#1899) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/typing.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/typing.txt b/requirements/typing.txt index 1669e6bbc2..06c33c6db6 100644 --- a/requirements/typing.txt +++ b/requirements/typing.txt @@ -1,5 +1,5 @@ mypy==1.11.2 -pyright==1.1.382.post1 +pyright==1.1.383 typing_extensions -r ./encryption.txt -r ./ocsp.txt From c48dc692824a4d10a939f3477cb1c0dceb4d5dcc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 7 Oct 2024 04:52:14 -0500 Subject: [PATCH 009/182] Update sphinx requirement from <8,>=5.3 to >=5.3,<9 (#1901) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/docs.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/docs.txt b/requirements/docs.txt index d3f0c73034..34723e6ea0 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -1,4 +1,4 @@ -sphinx>=5.3,<8 +sphinx>=5.3,<9 sphinx_rtd_theme>=2,<3 readthedocs-sphinx-search~=0.3 sphinxcontrib-shellcheck>=1,<2 From 006a9960f07c06bad9c5803ddbf8dc1750743d6d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 7 Oct 2024 04:55:07 -0500 Subject: [PATCH 010/182] Update sphinx-rtd-theme requirement from <3,>=2 to >=2,<4 (#1900) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] 
<49699333+dependabot[bot]@users.noreply.github.com> --- requirements/docs.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/docs.txt b/requirements/docs.txt index 34723e6ea0..7d52c1cb3e 100644 --- a/requirements/docs.txt +++ b/requirements/docs.txt @@ -1,5 +1,5 @@ sphinx>=5.3,<9 -sphinx_rtd_theme>=2,<3 +sphinx_rtd_theme>=2,<4 readthedocs-sphinx-search~=0.3 sphinxcontrib-shellcheck>=1,<2 sphinx-autobuild>=2020.9.1 From 5a66e992542f635175f58a43e37607a6cf6c4717 Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Tue, 8 Oct 2024 14:52:14 -0400 Subject: [PATCH 011/182] PYTHON-4828 - Drop Python 3.8 support (#1902) --- .evergreen/config.yml | 44 ++++++++++++------------------- .evergreen/utils.sh | 32 +++++++++++----------- .github/workflows/dist.yml | 7 +++-- .github/workflows/test-python.yml | 16 +++++------ CONTRIBUTING.md | 2 +- README.md | 2 +- doc/changelog.rst | 5 ++++ doc/faq.rst | 2 +- doc/installation.rst | 4 +-- doc/python3.rst | 2 +- pyproject.toml | 3 +-- test/asynchronous/conftest.py | 2 -- test/conftest.py | 2 -- 13 files changed, 56 insertions(+), 67 deletions(-) diff --git a/.evergreen/config.yml b/.evergreen/config.yml index 14e3426b32..7fb48c8054 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -2233,10 +2233,6 @@ axes: values: # Note: always display platform with python-version to avoid ambiguous display names. # Linux - - id: "3.8" - display_name: "Python 3.8" - variables: - PYTHON_BINARY: "/opt/python/3.8/bin/python3" - id: "3.9" display_name: "Python 3.9" variables: @@ -2269,10 +2265,6 @@ axes: - id: python-version-windows display_name: "Python" values: - - id: "3.8" - display_name: "Python 3.8" - variables: - PYTHON_BINARY: "C:/python/Python38/python.exe" - id: "3.9" display_name: "Python 3.9" variables: @@ -2297,10 +2289,8 @@ axes: - id: python-version-windows-32 display_name: "Python" values: - - id: "3.8" - display_name: "32-bit Python 3.8" - variables: - PYTHON_BINARY: "C:/python/32/Python38/python.exe" + + - id: "3.9" display_name: "32-bit Python 3.9" variables: @@ -2581,10 +2571,10 @@ buildvariants: auth: "*" ssl: "ssl" pyopenssl: "*" - # Only test "noauth" with Python 3.8. + # Only test "noauth" with Python 3.9. exclude_spec: platform: rhel8 - python-version: ["3.9", "3.10", "3.11", "3.12", "3.13", "pypy3.9", "pypy3.10"] + python-version: ["3.10", "3.11", "3.12", "3.13", "pypy3.9", "pypy3.10"] auth: "noauth" ssl: "ssl" pyopenssl: "*" @@ -2716,7 +2706,7 @@ buildvariants: matrix_spec: platform: rhel7 # Python 3.10+ requires OpenSSL 1.1.1+ - python-version: ["3.8", "3.9"] + python-version: ["3.9"] auth-ssl: "*" display_name: "OpenSSL 1.0.2 ${python-version} ${platform} ${auth-ssl}" tasks: @@ -2739,12 +2729,12 @@ buildvariants: then: add_tasks: *encryption-server-versions -# Storage engine tests on RHEL 8.4 (x86_64) with Python 3.8. +# Storage engine tests on RHEL 8.4 (x86_64) with Python 3.9. - matrix_name: "tests-storage-engines" matrix_spec: platform: rhel8 storage-engine: "*" - python-version: 3.8 + python-version: 3.9 display_name: "Storage ${storage-engine} ${python-version} ${platform}" rules: - if: @@ -2774,12 +2764,12 @@ buildvariants: - "test-3.6-standalone" - "test-3.6-replica_set" -# enableTestCommands=0 tests on RHEL 8.4 (x86_64) with Python 3.8. +# enableTestCommands=0 tests on RHEL 8.4 (x86_64) with Python 3.9. 
- matrix_name: "test-disableTestCommands" matrix_spec: platform: rhel8 disableTestCommands: "*" - python-version: "3.8" + python-version: "3.9" display_name: "Disable test commands ${python-version} ${platform}" tasks: - ".latest" @@ -2805,7 +2795,7 @@ buildvariants: - matrix_name: "test-search-index-helpers" matrix_spec: platform: rhel8 - python-version: "3.8" + python-version: "3.9" display_name: "Search Index Helpers ${platform}" tasks: - name: "test_atlas_task_group_search_indexes" @@ -2813,7 +2803,7 @@ buildvariants: - matrix_name: "tests-mod-wsgi" matrix_spec: platform: ubuntu-22.04 - python-version: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] mod-wsgi-version: "*" display_name: "${mod-wsgi-version} ${python-version} ${platform}" tasks: @@ -2825,7 +2815,7 @@ buildvariants: - matrix_name: "mockupdb-tests" matrix_spec: platform: rhel8 - python-version: 3.8 + python-version: 3.9 display_name: "MockupDB Tests" tasks: - name: "mockupdb" @@ -2833,7 +2823,7 @@ buildvariants: - matrix_name: "tests-doctests" matrix_spec: platform: rhel8 - python-version: ["3.8"] + python-version: ["3.9"] display_name: "Doctests ${python-version} ${platform}" tasks: - name: "doctests" @@ -2873,7 +2863,7 @@ buildvariants: - matrix_name: "data-lake-spec-tests" matrix_spec: platform: ubuntu-22.04 - python-version: ["3.8", "3.10"] + python-version: ["3.9", "3.10"] auth: "auth" c-extensions: "*" display_name: "Atlas Data Lake ${python-version} ${c-extensions}" @@ -2883,7 +2873,7 @@ buildvariants: - matrix_name: "stable-api-tests" matrix_spec: platform: rhel8 - python-version: ["3.8", "3.10"] + python-version: ["3.9", "3.10"] auth: "auth" versionedApi: "*" display_name: "Versioned API ${versionedApi} ${python-version}" @@ -2896,7 +2886,7 @@ buildvariants: - matrix_name: "ocsp-test" matrix_spec: platform: rhel8 - python-version: ["3.8", "3.10", "pypy3.9", "pypy3.10"] + python-version: ["3.9", "3.10", "pypy3.9", "pypy3.10"] mongodb-version: ["4.4", "5.0", "6.0", "7.0", "8.0", "latest"] auth: "noauth" ssl: "ssl" @@ -2908,7 +2898,7 @@ buildvariants: - matrix_name: "ocsp-test-windows" matrix_spec: platform: windows - python-version-windows: ["3.8", "3.10"] + python-version-windows: ["3.9", "3.10"] mongodb-version: ["4.4", "5.0", "6.0", "7.0", "8.0", "latest"] auth: "noauth" ssl: "ssl" diff --git a/.evergreen/utils.sh b/.evergreen/utils.sh index 1a5e2a153f..d44425a905 100755 --- a/.evergreen/utils.sh +++ b/.evergreen/utils.sh @@ -4,8 +4,8 @@ set -o xtrace find_python3() { PYTHON="" - # Add a fallback system python3 if it is available and Python 3.8+. - if is_python_38 "$(command -v python3)"; then + # Add a fallback system python3 if it is available and Python 3.9+. + if is_python_39 "$(command -v python3)"; then PYTHON="$(command -v python3)" fi # Find a suitable toolchain version, if available. 
@@ -14,23 +14,23 @@ find_python3() { if [ -d "/Library/Frameworks/Python.Framework/Versions/3.10" ]; then PYTHON="/Library/Frameworks/Python.Framework/Versions/3.10/bin/python3" # macos 10.14 - elif [ -d "/Library/Frameworks/Python.Framework/Versions/3.8" ]; then - PYTHON="/Library/Frameworks/Python.Framework/Versions/3.8/bin/python3" + elif [ -d "/Library/Frameworks/Python.Framework/Versions/3.9" ]; then + PYTHON="/Library/Frameworks/Python.Framework/Versions/3.9/bin/python3" fi elif [ "Windows_NT" = "$OS" ]; then # Magic variable in cygwin - PYTHON="C:/python/Python38/python.exe" + PYTHON="C:/python/Python39/python.exe" else - # Prefer our own toolchain, fall back to mongodb toolchain if it has Python 3.8+. - if [ -f "/opt/python/3.8/bin/python3" ]; then - PYTHON="/opt/python/3.8/bin/python3" - elif is_python_38 "$(command -v /opt/mongodbtoolchain/v4/bin/python3)"; then + # Prefer our own toolchain, fall back to mongodb toolchain if it has Python 3.9+. + if [ -f "/opt/python/3.9/bin/python3" ]; then + PYTHON="/opt/python/3.9/bin/python3" + elif is_python_39 "$(command -v /opt/mongodbtoolchain/v4/bin/python3)"; then PYTHON="/opt/mongodbtoolchain/v4/bin/python3" - elif is_python_38 "$(command -v /opt/mongodbtoolchain/v3/bin/python3)"; then + elif is_python_39 "$(command -v /opt/mongodbtoolchain/v3/bin/python3)"; then PYTHON="/opt/mongodbtoolchain/v3/bin/python3" fi fi if [ -z "$PYTHON" ]; then - echo "Cannot test without python3.8+ installed!" + echo "Cannot test without python3.9+ installed!" exit 1 fi echo "$PYTHON" @@ -96,15 +96,15 @@ testinstall () { fi } -# Function that returns success if the provided Python binary is version 3.8 or later +# Function that returns success if the provided Python binary is version 3.9 or later # Usage: -# is_python_38 /path/to/python +# is_python_39 /path/to/python # * param1: Python binary -is_python_38() { +is_python_39() { if [ -z "$1" ]; then return 1 - elif $1 -c "import sys; exit(sys.version_info[:2] < (3, 8))"; then - # runs when sys.version_info[:2] >= (3, 8) + elif $1 -c "import sys; exit(sys.version_info[:2] < (3, 9))"; then + # runs when sys.version_info[:2] >= (3, 9) return 0 else return 1 diff --git a/.github/workflows/dist.yml b/.github/workflows/dist.yml index 7ec55dd3b3..fbc7ff7390 100644 --- a/.github/workflows/dist.yml +++ b/.github/workflows/dist.yml @@ -53,7 +53,7 @@ jobs: - uses: actions/setup-python@v5 with: cache: 'pip' - python-version: 3.8 + python-version: 3.9 cache-dependency-path: 'pyproject.toml' allow-prereleases: true @@ -79,13 +79,12 @@ jobs: env: CIBW_MANYLINUX_X86_64_IMAGE: manylinux1 CIBW_MANYLINUX_I686_IMAGE: manylinux1 - CIBW_BUILD: "cp38-${{ matrix.buildplat[1] }} cp39-${{ matrix.buildplat[1] }}" + CIBW_BUILD: "cp39-${{ matrix.buildplat[1] }} cp39-${{ matrix.buildplat[1] }}" run: python -m cibuildwheel --output-dir wheelhouse - name: Assert all versions in wheelhouse if: ${{ ! 
startsWith(matrix.buildplat[1], 'macos') }} run: | - ls wheelhouse/*cp38*.whl ls wheelhouse/*cp39*.whl ls wheelhouse/*cp310*.whl ls wheelhouse/*cp311*.whl @@ -109,7 +108,7 @@ jobs: - uses: actions/setup-python@v5 with: # Build sdist on lowest supported Python - python-version: '3.8' + python-version: '3.9' - name: Build SDist run: | diff --git a/.github/workflows/test-python.yml b/.github/workflows/test-python.yml index 3ecdfa52f3..e55444ceca 100644 --- a/.github/workflows/test-python.yml +++ b/.github/workflows/test-python.yml @@ -22,7 +22,7 @@ jobs: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: - python-version: "3.8" + python-version: "3.9" cache: 'pip' cache-dependency-path: 'pyproject.toml' - name: Install Python dependencies @@ -51,7 +51,7 @@ jobs: strategy: matrix: os: [ubuntu-20.04] - python-version: ["3.8", "pypy-3.9", "3.13"] + python-version: ["3.9", "pypy-3.9", "3.13"] name: CPython ${{ matrix.python-version }}-${{ matrix.os }} steps: - uses: actions/checkout@v4 @@ -87,7 +87,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v5 with: - python-version: "3.8" + python-version: "3.9" cache: 'pip' cache-dependency-path: 'pyproject.toml' - name: Install dependencies @@ -111,7 +111,7 @@ jobs: cache: 'pip' cache-dependency-path: 'pyproject.toml' # Build docs on lowest supported Python for furo - python-version: '3.8' + python-version: '3.9' - name: Install dependencies run: | pip install -U pip hatch @@ -129,7 +129,7 @@ jobs: cache: 'pip' cache-dependency-path: 'pyproject.toml' # Build docs on lowest supported Python for furo - python-version: '3.8' + python-version: '3.9' - name: Install dependencies run: | pip install -U pip hatch @@ -142,7 +142,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ["3.8", "3.11"] + python: ["3.9", "3.11"] steps: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 @@ -167,7 +167,7 @@ jobs: cache: 'pip' cache-dependency-path: 'pyproject.toml' # Build sdist on lowest supported Python - python-version: '3.8' + python-version: '3.9' - name: Build SDist shell: bash run: | @@ -199,7 +199,7 @@ jobs: cache: 'pip' cache-dependency-path: 'sdist/test/pyproject.toml' # Test sdist on lowest supported Python - python-version: '3.8' + python-version: '3.9' - name: Start MongoDB uses: supercharge/mongodb-github-action@1.11.0 - name: Run connect test from sdist diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 2c2a5f4316..7516fbc9ed 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -16,7 +16,7 @@ be of interest or that has already been addressed. ## Supported Interpreters -PyMongo supports CPython 3.8+ and PyPy3.9+. Language features not +PyMongo supports CPython 3.9+ and PyPy3.9+. Language features not supported by all interpreters can not be used. ## Style Guide diff --git a/README.md b/README.md index 1076b66377..9b5aa33f78 100644 --- a/README.md +++ b/README.md @@ -90,7 +90,7 @@ package that is incompatible with PyMongo. ## Dependencies -PyMongo supports CPython 3.8+ and PyPy3.9+. +PyMongo supports CPython 3.9+ and PyPy3.9+. Required dependencies: diff --git a/doc/changelog.rst b/doc/changelog.rst index 574ecad763..a73a89a0ef 100644 --- a/doc/changelog.rst +++ b/doc/changelog.rst @@ -1,6 +1,11 @@ Changelog ========= +Changes in Version 4.11.0 +------------------------- + +.. warning:: PyMongo 4.11 drops support for Python 3.8: Python 3.9+ or PyPy 3.9+ is now required. 
+ Changes in Version 4.10.1 ------------------------- diff --git a/doc/faq.rst b/doc/faq.rst index f0463badaa..15950e7716 100644 --- a/doc/faq.rst +++ b/doc/faq.rst @@ -166,7 +166,7 @@ they are returned to the pool. Does PyMongo support Python 3? ------------------------------ -PyMongo supports CPython 3.8+ and PyPy3.9+. See the :doc:`python3` for details. +PyMongo supports CPython 3.9+ and PyPy3.9+. See the :doc:`python3` for details. Does PyMongo support asynchronous frameworks like Gevent, asyncio, Tornado, or Twisted? --------------------------------------------------------------------------------------- diff --git a/doc/installation.rst b/doc/installation.rst index ee83b30c6f..dd8eb6ab42 100644 --- a/doc/installation.rst +++ b/doc/installation.rst @@ -28,7 +28,7 @@ To upgrade using pip:: Dependencies ------------ -PyMongo supports CPython 3.8+ and PyPy3.9+. +PyMongo supports CPython 3.9+ and PyPy3.9+. Required dependencies ..................... @@ -140,7 +140,7 @@ See `http://bugs.python.org/issue11623 `_ for a more detailed explanation. **Lion (10.7) and newer** - PyMongo's C extensions can be built against -versions of Python 3.8+ downloaded from python.org. In all cases Xcode must be +versions of Python 3.9+ downloaded from python.org. In all cases Xcode must be installed with 'UNIX Development Support'. **Xcode 5.1**: Starting with version 5.1 the version of clang that ships with diff --git a/doc/python3.rst b/doc/python3.rst index 148c5ee454..1ea43b3ccb 100644 --- a/doc/python3.rst +++ b/doc/python3.rst @@ -4,7 +4,7 @@ Python 3 FAQ What Python 3 versions are supported? ------------------------------------- -PyMongo supports CPython 3.8+ and PyPy3.9+. +PyMongo supports CPython 3.9+ and PyPy3.9+. Are there any PyMongo behavior changes with Python 3? ----------------------------------------------------- diff --git a/pyproject.toml b/pyproject.toml index 30c7c046b9..2688aab27e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,7 +8,7 @@ dynamic = ["version", "dependencies", "optional-dependencies"] description = "Python driver for MongoDB " readme = "README.md" license = {file="LICENSE"} -requires-python = ">=3.8" +requires-python = ">=3.9" authors = [ { name = "The MongoDB Python Team" }, ] @@ -30,7 +30,6 @@ classifiers = [ "Programming Language :: Python :: Implementation :: PyPy", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3 :: Only", - "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", diff --git a/test/asynchronous/conftest.py b/test/asynchronous/conftest.py index c08f224abd..e443dff6c0 100644 --- a/test/asynchronous/conftest.py +++ b/test/asynchronous/conftest.py @@ -17,8 +17,6 @@ def event_loop_policy(): # has issues with sharing sockets across loops (https://github.com/python/cpython/issues/122240) # We explicitly use a different loop implementation here to prevent that issue if sys.platform == "win32": - # Needed for Python 3.8. 
- asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) return asyncio.WindowsSelectorEventLoopPolicy() # type: ignore[attr-defined] return asyncio.get_event_loop_policy() diff --git a/test/conftest.py b/test/conftest.py index ca817a5a62..a3d954c7c3 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -15,8 +15,6 @@ def event_loop_policy(): # has issues with sharing sockets across loops (https://github.com/python/cpython/issues/122240) # We explicitly use a different loop implementation here to prevent that issue if sys.platform == "win32": - # Needed for Python 3.8. - asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) return asyncio.WindowsSelectorEventLoopPolicy() # type: ignore[attr-defined] return asyncio.get_event_loop_policy() From d21a8ddcff0ce9d54fe5b353bd4477936a02528e Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Tue, 8 Oct 2024 15:14:54 -0400 Subject: [PATCH 012/182] PYTHON-4827 - Fix dnspython typechecking failures (#1903) Co-authored-by: Steven Silvester --- pymongo/srv_resolver.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/pymongo/srv_resolver.py b/pymongo/srv_resolver.py index 6f6cc285fa..5be6cb98db 100644 --- a/pymongo/srv_resolver.py +++ b/pymongo/srv_resolver.py @@ -99,7 +99,7 @@ def get_options(self) -> Optional[str]: raise ConfigurationError(str(exc)) from None if len(results) > 1: raise ConfigurationError("Only one TXT record is supported") - return (b"&".join([b"".join(res.strings) for res in results])).decode("utf-8") + return (b"&".join([b"".join(res.strings) for res in results])).decode("utf-8") # type: ignore[attr-defined] def _resolve_uri(self, encapsulate_errors: bool) -> resolver.Answer: try: @@ -121,7 +121,8 @@ def _get_srv_response_and_hosts( # Construct address tuples nodes = [ - (maybe_decode(res.target.to_text(omit_final_dot=True)), res.port) for res in results + (maybe_decode(res.target.to_text(omit_final_dot=True)), res.port) # type: ignore[attr-defined] + for res in results ] # Validate hosts From 8f32f3cd245b8bebd2d91469aa7477af2bf5ce38 Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Tue, 8 Oct 2024 15:52:16 -0400 Subject: [PATCH 013/182] PYTHON-4831 - Remove pytz from examples (#1904) Co-authored-by: Steven Silvester --- doc/examples/datetimes.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/examples/datetimes.rst b/doc/examples/datetimes.rst index 1790506423..a8c0476903 100644 --- a/doc/examples/datetimes.rst +++ b/doc/examples/datetimes.rst @@ -70,9 +70,9 @@ those datetimes to UTC automatically: .. doctest:: - >>> import pytz - >>> pacific = pytz.timezone("US/Pacific") - >>> aware_datetime = pacific.localize(datetime.datetime(2002, 10, 27, 6, 0, 0)) + >>> from zoneinfo import ZoneInfo + >>> from datetime import datetime + >>> aware_datetime = datetime(2002, 10, 27, 6, 0, 0, tzinfo=ZoneInfo("US/Pacific")) >>> result = db.times.insert_one({"date": aware_datetime}) >>> db.times.find_one()["date"] datetime.datetime(2002, 10, 27, 14, 0) @@ -97,7 +97,7 @@ out of MongoDB in US/Pacific time: datetime.datetime(2002, 10, 27, 14, 0) >>> aware_times = db.times.with_options(codec_options=CodecOptions( ... tz_aware=True, - ... tzinfo=pytz.timezone('US/Pacific'))) + ... 
tzinfo=ZoneInfo("US/Pacific"))) >>> result = aware_times.find_one()['date'] datetime.datetime(2002, 10, 27, 6, 0, # doctest: +NORMALIZE_WHITESPACE tzinfo=) From 5fa4380324b7109edce24ad1cd97f3eec6bc7697 Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Wed, 9 Oct 2024 10:44:41 -0400 Subject: [PATCH 014/182] PYTHON-4784 - Add tests to confirm async parallelism (#1886) --- test/asynchronous/test_concurrency.py | 54 +++++++++++++++++++++++++++ tools/synchro.py | 2 +- 2 files changed, 55 insertions(+), 1 deletion(-) create mode 100644 test/asynchronous/test_concurrency.py diff --git a/test/asynchronous/test_concurrency.py b/test/asynchronous/test_concurrency.py new file mode 100644 index 0000000000..1683b8413b --- /dev/null +++ b/test/asynchronous/test_concurrency.py @@ -0,0 +1,54 @@ +# Copyright 2024-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Tests to ensure that the async API is properly concurrent with asyncio.""" +from __future__ import annotations + +import asyncio +import time +from test.asynchronous import AsyncIntegrationTest, async_client_context +from test.utils import delay + +_IS_SYNC = False + + +class TestAsyncConcurrency(AsyncIntegrationTest): + async def _task(self, client): + await client.db.test.find_one({"$where": delay(0.20)}) + + async def test_concurrency(self): + tasks = [] + iterations = 5 + + client = await self.async_single_client() + await client.db.test.drop() + await client.db.test.insert_one({"x": 1}) + + start = time.time() + + for _ in range(iterations): + await self._task(client) + + sequential_time = time.time() - start + start = time.time() + + for i in range(iterations): + tasks.append(self._task(client)) + + await asyncio.gather(*tasks) + concurrent_time = time.time() - start + + percent_faster = (sequential_time - concurrent_time) / concurrent_time * 100 + # We expect the concurrent tasks to be at least 75% faster on all platforms as a conservative benchmark + self.assertGreaterEqual(percent_faster, 75) diff --git a/tools/synchro.py b/tools/synchro.py index 585fc5fefd..5ce83cfbeb 100644 --- a/tools/synchro.py +++ b/tools/synchro.py @@ -152,7 +152,7 @@ def async_only_test(f: str) -> bool: """Return True for async tests that should not be converted to sync.""" - return f in ["test_locks.py"] + return f in ["test_locks.py", "test_concurrency.py"] test_files = [ From ac198af557410bce4809138c0089e1e56ff6db87 Mon Sep 17 00:00:00 2001 From: Shane Harvey Date: Wed, 9 Oct 2024 09:58:20 -0700 Subject: [PATCH 015/182] PYTHON-4729 Drop support for MongoDB 3.6 (#1905) --- .evergreen/config.yml | 52 ++++--------------- README.md | 2 +- doc/changelog.rst | 15 ++++++ doc/common-issues.rst | 6 +-- doc/examples/authentication.rst | 5 +- pymongo/asynchronous/collection.py | 5 -- pymongo/asynchronous/mongo_client.py | 5 +- pymongo/common.py | 4 +- pymongo/synchronous/collection.py | 5 -- pymongo/synchronous/mongo_client.py | 5 +- test/asynchronous/test_client.py | 8 +-- .../rs/null_election_id-pre-6.0.json | 2 +- 
test/mockupdb/test_auth_recovering_member.py | 4 +- test/mockupdb/test_cluster_time.py | 7 ++- test/mockupdb/test_cursor.py | 3 +- test/mockupdb/test_cursor_namespace.py | 5 +- test/mockupdb/test_getmore_sharded.py | 8 ++- test/mockupdb/test_handshake.py | 23 ++++++-- test/mockupdb/test_initial_ismaster.py | 10 ++-- test/mockupdb/test_list_indexes.py | 3 +- test/mockupdb/test_max_staleness.py | 6 ++- test/mockupdb/test_mixed_version_sharded.py | 2 - .../mockupdb/test_mongos_command_read_mode.py | 13 ++++- .../test_network_disconnect_primary.py | 9 +++- test/mockupdb/test_op_msg_read_preference.py | 9 ++-- test/mockupdb/test_query_read_pref_sharded.py | 8 ++- test/mockupdb/test_reset_and_request_check.py | 6 ++- test/mockupdb/test_slave_okay_rs.py | 10 +++- test/mockupdb/test_slave_okay_sharded.py | 7 ++- test/mockupdb/test_slave_okay_single.py | 3 +- test/test_client.py | 8 +-- test/test_discovery_and_monitoring.py | 7 ++- test/test_server_description.py | 4 +- test/test_topology.py | 50 +++++++++++++----- test/utils_selection_tests.py | 4 +- 35 files changed, 191 insertions(+), 132 deletions(-) diff --git a/.evergreen/config.yml b/.evergreen/config.yml index 7fb48c8054..a345e4f5b7 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -1014,33 +1014,6 @@ tasks: TOPOLOGY: "server" - func: "run doctests" - - name: "test-3.6-standalone" - tags: ["3.6", "standalone"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "3.6" - TOPOLOGY: "server" - - func: "run tests" - - - name: "test-3.6-replica_set" - tags: ["3.6", "replica_set"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "3.6" - TOPOLOGY: "replica_set" - - func: "run tests" - - - name: "test-3.6-sharded_cluster" - tags: ["3.6", "sharded_cluster"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "3.6" - TOPOLOGY: "sharded_cluster" - - func: "run tests" - - name: "test-4.0-standalone" tags: ["4.0", "standalone"] commands: @@ -2186,10 +2159,6 @@ axes: - id: mongodb-version display_name: "MongoDB" values: - - id: "3.6" - display_name: "MongoDB 3.6" - variables: - VERSION: "3.6" - id: "4.0" display_name: "MongoDB 4.0" variables: @@ -2490,7 +2459,6 @@ buildvariants: - ".4.4" - ".4.2" - ".4.0" - - ".3.6" - matrix_name: "test-macos-arm64" matrix_spec: @@ -2562,7 +2530,6 @@ buildvariants: - ".4.4" - ".4.2" - ".4.0" - - ".3.6" - matrix_name: "tests-pyopenssl" matrix_spec: @@ -2657,19 +2624,22 @@ buildvariants: display_name: "${compression} ${c-extensions} ${python-version} ${platform}" tasks: - "test-latest-standalone" + - "test-8.0-standalone" + - "test-7.0-standalone" + - "test-6.0-standalone" - "test-5.0-standalone" - "test-4.4-standalone" - "test-4.2-standalone" + - "test-4.0-standalone" rules: - # Server versions 3.6 and 4.0 support snappy and zlib. + # Server version 4.0 supports snappy and zlib but not zstd. 
- if: python-version: "*" c-extensions: "*" - compression: ["snappy", "zlib"] + compression: ["zstd"] then: - add_tasks: + remove_tasks: - "test-4.0-standalone" - - "test-3.6-standalone" - matrix_name: "tests-python-version-green-framework-rhel8" matrix_spec: @@ -2734,7 +2704,7 @@ buildvariants: matrix_spec: platform: rhel8 storage-engine: "*" - python-version: 3.9 + python-version: "3.9" display_name: "Storage ${storage-engine} ${python-version} ${platform}" rules: - if: @@ -2751,7 +2721,6 @@ buildvariants: - "test-4.4-standalone" - "test-4.2-standalone" - "test-4.0-standalone" - - "test-3.6-standalone" - if: # MongoDB 4.2 drops support for MMAPv1 platform: rhel8 @@ -2761,8 +2730,6 @@ buildvariants: add_tasks: - "test-4.0-standalone" - "test-4.0-replica_set" - - "test-3.6-standalone" - - "test-3.6-replica_set" # enableTestCommands=0 tests on RHEL 8.4 (x86_64) with Python 3.9. - matrix_name: "test-disableTestCommands" @@ -2881,6 +2848,9 @@ buildvariants: tasks: # Versioned API was introduced in MongoDB 4.7 - "test-latest-standalone" + - "test-8.0-standalone" + - "test-7.0-standalone" + - "test-6.0-standalone" - "test-5.0-standalone" - matrix_name: "ocsp-test" diff --git a/README.md b/README.md index 9b5aa33f78..f5e2cdf46d 100644 --- a/README.md +++ b/README.md @@ -14,7 +14,7 @@ a native Python driver for MongoDB. The `gridfs` package is a [gridfs](https://github.com/mongodb/specifications/blob/master/source/gridfs/gridfs-spec.rst/) implementation on top of `pymongo`. -PyMongo supports MongoDB 3.6, 4.0, 4.2, 4.4, 5.0, 6.0, 7.0, and 8.0. +PyMongo supports MongoDB 4.0, 4.2, 4.4, 5.0, 6.0, 7.0, and 8.0. ## Support / Feedback diff --git a/doc/changelog.rst b/doc/changelog.rst index a73a89a0ef..6a118f56ca 100644 --- a/doc/changelog.rst +++ b/doc/changelog.rst @@ -5,6 +5,21 @@ Changes in Version 4.11.0 ------------------------- .. warning:: PyMongo 4.11 drops support for Python 3.8: Python 3.9+ or PyPy 3.9+ is now required. +.. warning:: PyMongo 4.11 drops support for MongoDB 3.6. PyMongo now supports MongoDB 4.0+. + Driver support for MongoDB 3.6 reached end of life in April 2024. + +PyMongo 4.11 brings a number of changes including: + +- Dropped support for Python 3.8. +- Dropped support for MongoDB 3.6. + +Issues Resolved +............... + +See the `PyMongo 4.11 release notes in JIRA`_ for the list of resolved issues +in this release. + +.. _PyMongo 4.11 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=40784 Changes in Version 4.10.1 ------------------------- diff --git a/doc/common-issues.rst b/doc/common-issues.rst index 3d2d06a5a7..b300bac784 100644 --- a/doc/common-issues.rst +++ b/doc/common-issues.rst @@ -6,14 +6,14 @@ Also see the :ref:`TLSErrors` section. Server reports wire version X, PyMongo requires Y ------------------------------------------------- -When one attempts to connect to a <=3.4 version server, PyMongo will throw the following error:: +When one attempts to connect to a <=3.6 version server, PyMongo will throw the following error:: >>> client.admin.command('ping') ... - pymongo.errors.ConfigurationError: Server at localhost:27017 reports wire version 5, but this version of PyMongo requires at least 6 (MongoDB 3.6). + pymongo.errors.ConfigurationError: Server at localhost:27017 reports wire version 6, but this version of PyMongo requires at least 7 (MongoDB 4.0). This is caused by the driver being too new for the server it is being run against. 
-To resolve this issue either upgrade your database to version >= 3.6 or downgrade to PyMongo 3.x which supports MongoDB >= 2.6. +To resolve this issue either upgrade your database to version >= 4.0 or downgrade to an early version of PyMongo which supports MongoDB < 4.0. 'Cursor' object has no attribute '_Cursor__killed' diff --git a/doc/examples/authentication.rst b/doc/examples/authentication.rst index 24b3cff8df..6c89910f3c 100644 --- a/doc/examples/authentication.rst +++ b/doc/examples/authentication.rst @@ -97,9 +97,8 @@ the "MongoDB Challenge-Response" protocol:: Default Authentication Mechanism -------------------------------- -If no mechanism is specified, PyMongo automatically SCRAM-SHA-1 when connected -to MongoDB 3.6 and negotiates the mechanism to use (SCRAM-SHA-1 -or SCRAM-SHA-256) when connected to MongoDB 4.0+. +If no mechanism is specified, PyMongo automatically negotiates the mechanism to use (SCRAM-SHA-1 +or SCRAM-SHA-256) with the MongoDB server. Default Database and "authSource" --------------------------------- diff --git a/pymongo/asynchronous/collection.py b/pymongo/asynchronous/collection.py index 5abc41a7e0..4ddcbab4d2 100644 --- a/pymongo/asynchronous/collection.py +++ b/pymongo/asynchronous/collection.py @@ -1960,20 +1960,15 @@ async def _count_cmd( collation: Optional[Collation], ) -> int: """Internal count command helper.""" - # XXX: "ns missing" checks can be removed when we drop support for - # MongoDB 3.0, see SERVER-17051. res = await self._command( conn, cmd, read_preference=read_preference, - allowable_errors=["ns missing"], codec_options=self._write_response_codec_options, read_concern=self.read_concern, collation=collation, session=session, ) - if res.get("errmsg", "") == "ns missing": - return 0 return int(res["n"]) async def _aggregate_one_result( diff --git a/pymongo/asynchronous/mongo_client.py b/pymongo/asynchronous/mongo_client.py index 814c604562..bfae302dac 100644 --- a/pymongo/asynchronous/mongo_client.py +++ b/pymongo/asynchronous/mongo_client.py @@ -498,9 +498,8 @@ def __init__( - `authSource`: The database to authenticate on. Defaults to the database specified in the URI, if provided, or to "admin". - `authMechanism`: See :data:`~pymongo.auth.MECHANISMS` for options. - If no mechanism is specified, PyMongo automatically SCRAM-SHA-1 - when connected to MongoDB 3.6 and negotiates the mechanism to use - (SCRAM-SHA-1 or SCRAM-SHA-256) when connected to MongoDB 4.0+. + If no mechanism is specified, PyMongo automatically negotiates the + mechanism to use (SCRAM-SHA-1 or SCRAM-SHA-256) with the MongoDB server. - `authMechanismProperties`: Used to specify authentication mechanism specific options. To specify the service name for GSSAPI authentication pass authMechanismProperties='SERVICE_NAME: int: """Internal count command helper.""" - # XXX: "ns missing" checks can be removed when we drop support for - # MongoDB 3.0, see SERVER-17051. res = self._command( conn, cmd, read_preference=read_preference, - allowable_errors=["ns missing"], codec_options=self._write_response_codec_options, read_concern=self.read_concern, collation=collation, session=session, ) - if res.get("errmsg", "") == "ns missing": - return 0 return int(res["n"]) def _aggregate_one_result( diff --git a/pymongo/synchronous/mongo_client.py b/pymongo/synchronous/mongo_client.py index b2dff5b4ab..1351cb200f 100644 --- a/pymongo/synchronous/mongo_client.py +++ b/pymongo/synchronous/mongo_client.py @@ -496,9 +496,8 @@ def __init__( - `authSource`: The database to authenticate on. 
Defaults to the database specified in the URI, if provided, or to "admin". - `authMechanism`: See :data:`~pymongo.auth.MECHANISMS` for options. - If no mechanism is specified, PyMongo automatically SCRAM-SHA-1 - when connected to MongoDB 3.6 and negotiates the mechanism to use - (SCRAM-SHA-1 or SCRAM-SHA-256) when connected to MongoDB 4.0+. + If no mechanism is specified, PyMongo automatically negotiates the + mechanism to use (SCRAM-SHA-1 or SCRAM-SHA-256) with the MongoDB server. - `authMechanismProperties`: Used to specify authentication mechanism specific options. To specify the service name for GSSAPI authentication pass authMechanismProperties='SERVICE_NAME: Date: Wed, 9 Oct 2024 14:07:44 -0500 Subject: [PATCH 016/182] PYTHON-4818 Use OCSP Scripts from Drivers-Tools (#1895) --- .evergreen/config.yml | 121 ++++++++++++++++-------------------------- 1 file changed, 47 insertions(+), 74 deletions(-) diff --git a/.evergreen/config.yml b/.evergreen/config.yml index a345e4f5b7..1ef8751501 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -651,63 +651,16 @@ functions: CA_FILE="${DRIVERS_TOOLS}/.evergreen/ocsp/${OCSP_ALGORITHM}/ca.pem" \ OCSP_TLS_SHOULD_SUCCEED="${OCSP_TLS_SHOULD_SUCCEED}" \ bash ${PROJECT_DIRECTORY}/.evergreen/hatch.sh test:test-eg + bash ${DRIVERS_TOOLS}/.evergreen/ocsp/teardown.sh - run-valid-ocsp-server: - - command: shell.exec - params: - background: true - script: | - . src/.evergreen/scripts/env.sh - cd ${DRIVERS_TOOLS}/.evergreen/ocsp - . ./activate-ocspvenv.sh - python ocsp_mock.py \ - --ca_file ${OCSP_ALGORITHM}/ca.pem \ - --ocsp_responder_cert ${OCSP_ALGORITHM}/ca.crt \ - --ocsp_responder_key ${OCSP_ALGORITHM}/ca.key \ - -p 8100 -v - run-revoked-ocsp-server: - - command: shell.exec - params: - background: true - script: | - . src/.evergreen/scripts/env.sh - cd ${DRIVERS_TOOLS}/.evergreen/ocsp - . ./activate-ocspvenv.sh - python ocsp_mock.py \ - --ca_file ${OCSP_ALGORITHM}/ca.pem \ - --ocsp_responder_cert ${OCSP_ALGORITHM}/ca.crt \ - --ocsp_responder_key ${OCSP_ALGORITHM}/ca.key \ - -p 8100 \ - -v \ - --fault revoked - run-valid-delegate-ocsp-server: - - command: shell.exec - params: - background: true - script: | - . src/.evergreen/scripts/env.sh - cd ${DRIVERS_TOOLS}/.evergreen/ocsp - . ./activate-ocspvenv.sh - python ocsp_mock.py \ - --ca_file ${OCSP_ALGORITHM}/ca.pem \ - --ocsp_responder_cert ${OCSP_ALGORITHM}/ocsp-responder.crt \ - --ocsp_responder_key ${OCSP_ALGORITHM}/ocsp-responder.key \ - -p 8100 -v - run-revoked-delegate-ocsp-server: - - command: shell.exec + "run-ocsp-server": + - command: subprocess.exec params: background: true - script: | - . src/.evergreen/scripts/env.sh - cd ${DRIVERS_TOOLS}/.evergreen/ocsp - . 
./activate-ocspvenv.sh - python ocsp_mock.py \ - --ca_file ${OCSP_ALGORITHM}/ca.pem \ - --ocsp_responder_cert ${OCSP_ALGORITHM}/ocsp-responder.crt \ - --ocsp_responder_key ${OCSP_ALGORITHM}/ocsp-responder.key \ - -p 8100 \ - -v \ - --fault revoked + binary: bash + include_expansions_in_env: [SERVER_TYPE, OCSP_ALGORITHM] + args: + - ${DRIVERS_TOOLS}/.evergreen/ocsp/setup.sh "run load-balancer": - command: shell.exec @@ -1360,9 +1313,10 @@ tasks: - name: test-ocsp-rsa-valid-cert-server-staples tags: ["ocsp", "ocsp-rsa", "ocsp-staple"] commands: - - func: run-valid-ocsp-server + - func: run-ocsp-server vars: OCSP_ALGORITHM: "rsa" + SERVER_TYPE: "valid" - func: "bootstrap mongo-orchestration" vars: ORCHESTRATION_FILE: "rsa-basic-tls-ocsp-mustStaple.json" @@ -1374,9 +1328,10 @@ tasks: - name: test-ocsp-rsa-invalid-cert-server-staples tags: ["ocsp", "ocsp-rsa", "ocsp-staple"] commands: - - func: run-revoked-ocsp-server + - func: run-ocsp-server vars: OCSP_ALGORITHM: "rsa" + SERVER_TYPE: "revoked" - func: "bootstrap mongo-orchestration" vars: ORCHESTRATION_FILE: "rsa-basic-tls-ocsp-mustStaple.json" @@ -1388,9 +1343,10 @@ tasks: - name: test-ocsp-rsa-valid-cert-server-does-not-staple tags: ["ocsp", "ocsp-rsa"] commands: - - func: run-valid-ocsp-server + - func: run-ocsp-server vars: OCSP_ALGORITHM: "rsa" + SERVER_TYPE: valid - func: "bootstrap mongo-orchestration" vars: ORCHESTRATION_FILE: "rsa-basic-tls-ocsp-disableStapling.json" @@ -1402,9 +1358,10 @@ tasks: - name: test-ocsp-rsa-invalid-cert-server-does-not-staple tags: ["ocsp", "ocsp-rsa"] commands: - - func: run-revoked-ocsp-server + - func: run-ocsp-server vars: OCSP_ALGORITHM: "rsa" + SERVER_TYPE: revoked - func: "bootstrap mongo-orchestration" vars: ORCHESTRATION_FILE: "rsa-basic-tls-ocsp-disableStapling.json" @@ -1427,9 +1384,10 @@ tasks: - name: test-ocsp-rsa-malicious-invalid-cert-mustStaple-server-does-not-staple tags: ["ocsp", "ocsp-rsa"] commands: - - func: run-revoked-ocsp-server + - func: run-ocsp-server vars: OCSP_ALGORITHM: "rsa" + SERVER_TYPE: revoked - func: "bootstrap mongo-orchestration" vars: ORCHESTRATION_FILE: "rsa-basic-tls-ocsp-mustStaple-disableStapling.json" @@ -1452,9 +1410,10 @@ tasks: - name: test-ocsp-rsa-delegate-valid-cert-server-staples tags: ["ocsp", "ocsp-rsa", "ocsp-staple"] commands: - - func: run-valid-delegate-ocsp-server + - func: run-ocsp-server vars: OCSP_ALGORITHM: "rsa" + SERVER_TYPE: valid-delegate - func: "bootstrap mongo-orchestration" vars: ORCHESTRATION_FILE: "rsa-basic-tls-ocsp-mustStaple.json" @@ -1466,9 +1425,10 @@ tasks: - name: test-ocsp-rsa-delegate-invalid-cert-server-staples tags: ["ocsp", "ocsp-rsa", "ocsp-staple"] commands: - - func: run-revoked-delegate-ocsp-server + - func: run-ocsp-server vars: OCSP_ALGORITHM: "rsa" + SERVER_TYPE: revoked-delegate - func: "bootstrap mongo-orchestration" vars: ORCHESTRATION_FILE: "rsa-basic-tls-ocsp-mustStaple.json" @@ -1480,9 +1440,10 @@ tasks: - name: test-ocsp-rsa-delegate-valid-cert-server-does-not-staple tags: ["ocsp", "ocsp-rsa"] commands: - - func: run-valid-delegate-ocsp-server + - func: run-ocsp-server vars: OCSP_ALGORITHM: "rsa" + SERVER_TYPE: valid-delegate - func: "bootstrap mongo-orchestration" vars: ORCHESTRATION_FILE: "rsa-basic-tls-ocsp-disableStapling.json" @@ -1494,9 +1455,10 @@ tasks: - name: test-ocsp-rsa-delegate-invalid-cert-server-does-not-staple tags: ["ocsp", "ocsp-rsa"] commands: - - func: run-revoked-delegate-ocsp-server + - func: run-ocsp-server vars: OCSP_ALGORITHM: "rsa" + SERVER_TYPE: revoked-delegate - func: "bootstrap 
mongo-orchestration" vars: ORCHESTRATION_FILE: "rsa-basic-tls-ocsp-disableStapling.json" @@ -1508,9 +1470,10 @@ tasks: - name: test-ocsp-rsa-delegate-malicious-invalid-cert-mustStaple-server-does-not-staple tags: ["ocsp", "ocsp-rsa"] commands: - - func: run-revoked-delegate-ocsp-server + - func: run-ocsp-server vars: OCSP_ALGORITHM: "rsa" + SERVER_TYPE: revoked-delegate - func: "bootstrap mongo-orchestration" vars: ORCHESTRATION_FILE: "rsa-basic-tls-ocsp-mustStaple-disableStapling.json" @@ -1522,9 +1485,10 @@ tasks: - name: test-ocsp-ecdsa-valid-cert-server-staples tags: ["ocsp", "ocsp-ecdsa", "ocsp-staple"] commands: - - func: run-valid-ocsp-server + - func: run-ocsp-server vars: OCSP_ALGORITHM: "ecdsa" + SERVER_TYPE: valid - func: "bootstrap mongo-orchestration" vars: ORCHESTRATION_FILE: "ecdsa-basic-tls-ocsp-mustStaple.json" @@ -1536,9 +1500,10 @@ tasks: - name: test-ocsp-ecdsa-invalid-cert-server-staples tags: ["ocsp", "ocsp-ecdsa", "ocsp-staple"] commands: - - func: run-revoked-ocsp-server + - func: run-ocsp-server vars: OCSP_ALGORITHM: "ecdsa" + SERVER_TYPE: revoked - func: "bootstrap mongo-orchestration" vars: ORCHESTRATION_FILE: "ecdsa-basic-tls-ocsp-mustStaple.json" @@ -1550,9 +1515,10 @@ tasks: - name: test-ocsp-ecdsa-valid-cert-server-does-not-staple tags: ["ocsp", "ocsp-ecdsa"] commands: - - func: run-valid-ocsp-server + - func: run-ocsp-server vars: OCSP_ALGORITHM: "ecdsa" + SERVER_TYPE: valid - func: "bootstrap mongo-orchestration" vars: ORCHESTRATION_FILE: "ecdsa-basic-tls-ocsp-disableStapling.json" @@ -1564,9 +1530,10 @@ tasks: - name: test-ocsp-ecdsa-invalid-cert-server-does-not-staple tags: ["ocsp", "ocsp-ecdsa"] commands: - - func: run-revoked-ocsp-server + - func: run-ocsp-server vars: OCSP_ALGORITHM: "ecdsa" + SERVER_TYPE: revoked - func: "bootstrap mongo-orchestration" vars: ORCHESTRATION_FILE: "ecdsa-basic-tls-ocsp-disableStapling.json" @@ -1589,9 +1556,10 @@ tasks: - name: test-ocsp-ecdsa-malicious-invalid-cert-mustStaple-server-does-not-staple tags: ["ocsp", "ocsp-ecdsa"] commands: - - func: run-revoked-ocsp-server + - func: run-ocsp-server vars: OCSP_ALGORITHM: "ecdsa" + SERVER_TYPE: revoked - func: "bootstrap mongo-orchestration" vars: ORCHESTRATION_FILE: "ecdsa-basic-tls-ocsp-mustStaple-disableStapling.json" @@ -1614,9 +1582,10 @@ tasks: - name: test-ocsp-ecdsa-delegate-valid-cert-server-staples tags: ["ocsp", "ocsp-ecdsa", "ocsp-staple"] commands: - - func: run-valid-delegate-ocsp-server + - func: run-ocsp-server vars: OCSP_ALGORITHM: "ecdsa" + SERVER_TYPE: valid-delegate - func: "bootstrap mongo-orchestration" vars: ORCHESTRATION_FILE: "ecdsa-basic-tls-ocsp-mustStaple.json" @@ -1628,9 +1597,10 @@ tasks: - name: test-ocsp-ecdsa-delegate-invalid-cert-server-staples tags: ["ocsp", "ocsp-ecdsa", "ocsp-staple"] commands: - - func: run-revoked-delegate-ocsp-server + - func: run-ocsp-server vars: OCSP_ALGORITHM: "ecdsa" + SERVER_TYPE: revoked-delegate - func: "bootstrap mongo-orchestration" vars: ORCHESTRATION_FILE: "ecdsa-basic-tls-ocsp-mustStaple.json" @@ -1642,9 +1612,10 @@ tasks: - name: test-ocsp-ecdsa-delegate-valid-cert-server-does-not-staple tags: ["ocsp", "ocsp-ecdsa"] commands: - - func: run-valid-delegate-ocsp-server + - func: run-ocsp-server vars: OCSP_ALGORITHM: "ecdsa" + SERVER_TYPE: valid-delegate - func: "bootstrap mongo-orchestration" vars: ORCHESTRATION_FILE: "ecdsa-basic-tls-ocsp-disableStapling.json" @@ -1656,9 +1627,10 @@ tasks: - name: test-ocsp-ecdsa-delegate-invalid-cert-server-does-not-staple tags: ["ocsp", "ocsp-ecdsa"] commands: - - func: 
run-revoked-delegate-ocsp-server + - func: run-ocsp-server vars: OCSP_ALGORITHM: "ecdsa" + SERVER_TYPE: revoked-delegate - func: "bootstrap mongo-orchestration" vars: ORCHESTRATION_FILE: "ecdsa-basic-tls-ocsp-disableStapling.json" @@ -1670,9 +1642,10 @@ tasks: - name: test-ocsp-ecdsa-delegate-malicious-invalid-cert-mustStaple-server-does-not-staple tags: ["ocsp", "ocsp-ecdsa"] commands: - - func: run-revoked-delegate-ocsp-server + - func: run-ocsp-server vars: OCSP_ALGORITHM: "ecdsa" + SERVER_TYPE: valid-delegate - func: "bootstrap mongo-orchestration" vars: ORCHESTRATION_FILE: "ecdsa-basic-tls-ocsp-mustStaple-disableStapling.json" From 8f26f43911ecb7cf9973040807d004c69ed88eb2 Mon Sep 17 00:00:00 2001 From: Shane Harvey Date: Thu, 10 Oct 2024 09:01:27 -0700 Subject: [PATCH 017/182] PYTHON-4450 Support free-threaded Python 3.13t with no-GIL (#1906) --- .github/workflows/dist.yml | 5 ++++- .github/workflows/test-python.yml | 23 +++++++++++++++++++---- bson/_cbsonmodule.c | 3 +++ doc/changelog.rst | 2 ++ pymongo/_cmessagemodule.c | 3 +++ pyproject.toml | 2 ++ test/asynchronous/test_client_context.py | 7 +++++++ test/test_client_context.py | 7 +++++++ 8 files changed, 47 insertions(+), 5 deletions(-) diff --git a/.github/workflows/dist.yml b/.github/workflows/dist.yml index fbc7ff7390..858d269e08 100644 --- a/.github/workflows/dist.yml +++ b/.github/workflows/dist.yml @@ -67,7 +67,7 @@ jobs: # Note: the default manylinux is manylinux2014 run: | python -m pip install -U pip - python -m pip install "cibuildwheel>=2.17,<3" + python -m pip install "cibuildwheel>=2.20,<3" - name: Build wheels env: @@ -89,6 +89,9 @@ jobs: ls wheelhouse/*cp310*.whl ls wheelhouse/*cp311*.whl ls wheelhouse/*cp312*.whl + ls wheelhouse/*cp313*.whl + # Free-threading builds: + ls wheelhouse/*cp313t*.whl - uses: actions/upload-artifact@v4 with: diff --git a/.github/workflows/test-python.yml b/.github/workflows/test-python.yml index e55444ceca..40991440d3 100644 --- a/.github/workflows/test-python.yml +++ b/.github/workflows/test-python.yml @@ -51,11 +51,18 @@ jobs: strategy: matrix: os: [ubuntu-20.04] - python-version: ["3.9", "pypy-3.9", "3.13"] + python-version: ["3.9", "pypy-3.9", "3.13", "3.13t"] name: CPython ${{ matrix.python-version }}-${{ matrix.os }} steps: - uses: actions/checkout@v4 - - name: Setup Python + - if: ${{ matrix.python-version == '3.13t' }} + name: Setup free-threaded Python + uses: deadsnakes/action@v3.2.0 + with: + python-version: 3.13 + nogil: true + - if: ${{ matrix.python-version != '3.13t' }} + name: Setup Python uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} @@ -65,9 +72,13 @@ jobs: - name: Install dependencies run: | pip install -U pip - if [ "${{ matrix.python-version }}" == "3.13" ]; then + if [[ "${{ matrix.python-version }}" == "3.13" ]]; then pip install --pre cffi setuptools pip install --no-build-isolation hatch + elif [[ "${{ matrix.python-version }}" == "3.13t" ]]; then + # Hatch can't be installed on 3.13t, use pytest directly. + pip install . 
+ pip install -r requirements/test.txt else pip install hatch fi @@ -77,7 +88,11 @@ jobs: mongodb-version: 6.0 - name: Run tests run: | - hatch run test:test + if [[ "${{ matrix.python-version }}" == "3.13t" ]]; then + pytest -v --durations=5 --maxfail=10 + else + hatch run test:test + fi doctest: runs-on: ubuntu-latest diff --git a/bson/_cbsonmodule.c b/bson/_cbsonmodule.c index 223c392280..a66071c285 100644 --- a/bson/_cbsonmodule.c +++ b/bson/_cbsonmodule.c @@ -3184,6 +3184,9 @@ static PyModuleDef_Slot _cbson_slots[] = { {Py_mod_exec, _cbson_exec}, #if defined(Py_MOD_MULTIPLE_INTERPRETERS_SUPPORTED) {Py_mod_multiple_interpreters, Py_MOD_MULTIPLE_INTERPRETERS_SUPPORTED}, +#endif +#if PY_VERSION_HEX >= 0x030D0000 + {Py_mod_gil, Py_MOD_GIL_NOT_USED}, #endif {0, NULL}, }; diff --git a/doc/changelog.rst b/doc/changelog.rst index 6a118f56ca..e7b160b176 100644 --- a/doc/changelog.rst +++ b/doc/changelog.rst @@ -12,6 +12,8 @@ PyMongo 4.11 brings a number of changes including: - Dropped support for Python 3.8. - Dropped support for MongoDB 3.6. +- Added support for free-threaded Python with the GIL disabled. For more information see: + `Free-threaded CPython `_. Issues Resolved ............... diff --git a/pymongo/_cmessagemodule.c b/pymongo/_cmessagemodule.c index b5adbeec32..eb457b341c 100644 --- a/pymongo/_cmessagemodule.c +++ b/pymongo/_cmessagemodule.c @@ -1022,6 +1022,9 @@ static PyModuleDef_Slot _cmessage_slots[] = { {Py_mod_exec, _cmessage_exec}, #ifdef Py_MOD_MULTIPLE_INTERPRETERS_SUPPORTED {Py_mod_multiple_interpreters, Py_MOD_MULTIPLE_INTERPRETERS_SUPPORTED}, +#endif +#if PY_VERSION_HEX >= 0x030D0000 + {Py_mod_gil, Py_MOD_GIL_NOT_USED}, #endif {0, NULL}, }; diff --git a/pyproject.toml b/pyproject.toml index 2688aab27e..b4f59f67d5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -236,6 +236,8 @@ partial_branches = ["if (.*and +)*not _use_c( and.*)*:"] directory = "htmlcov" [tool.cibuildwheel] +# Enable free-threaded support +free-threaded-support = true skip = "pp* *-musllinux*" build-frontend = "build" test-command = "python {project}/tools/fail_if_no_c.py" diff --git a/test/asynchronous/test_client_context.py b/test/asynchronous/test_client_context.py index a0cb53a14f..6d77818436 100644 --- a/test/asynchronous/test_client_context.py +++ b/test/asynchronous/test_client_context.py @@ -61,6 +61,13 @@ def test_setdefaultencoding_worked(self): self.assertEqual(sys.getdefaultencoding(), os.environ["SETDEFAULTENCODING"]) + def test_free_threading_is_enabled(self): + if "free-threading build" not in sys.version: + raise SkipTest("this test requires the Python free-threading build") + + # If the GIL is enabled then pymongo or one of our deps does not support free-threading. + self.assertFalse(sys._is_gil_enabled()) # type: ignore[attr-defined] + if __name__ == "__main__": unittest.main() diff --git a/test/test_client_context.py b/test/test_client_context.py index be8a562142..5996f9243b 100644 --- a/test/test_client_context.py +++ b/test/test_client_context.py @@ -61,6 +61,13 @@ def test_setdefaultencoding_worked(self): self.assertEqual(sys.getdefaultencoding(), os.environ["SETDEFAULTENCODING"]) + def test_free_threading_is_enabled(self): + if "free-threading build" not in sys.version: + raise SkipTest("this test requires the Python free-threading build") + + # If the GIL is enabled then pymongo or one of our deps does not support free-threading. 
+ self.assertFalse(sys._is_gil_enabled()) # type: ignore[attr-defined] + if __name__ == "__main__": unittest.main() From d1e4167dc96ee71bd3b0e0e93239b416d35795f9 Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Thu, 10 Oct 2024 13:47:14 -0400 Subject: [PATCH 018/182] PYTHON-4841 - Convert test.test_common to async (#1912) --- test/asynchronous/test_common.py | 185 +++++++++++++++++++++++++++++++ test/test_common.py | 52 +++++---- tools/synchro.py | 1 + 3 files changed, 214 insertions(+), 24 deletions(-) create mode 100644 test/asynchronous/test_common.py diff --git a/test/asynchronous/test_common.py b/test/asynchronous/test_common.py new file mode 100644 index 0000000000..00495e7c30 --- /dev/null +++ b/test/asynchronous/test_common.py @@ -0,0 +1,185 @@ +# Copyright 2011-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Test the pymongo common module.""" +from __future__ import annotations + +import sys +import uuid + +sys.path[0:0] = [""] + +from test.asynchronous import AsyncIntegrationTest, async_client_context, connected, unittest + +from bson.binary import PYTHON_LEGACY, STANDARD, Binary, UuidRepresentation +from bson.codec_options import CodecOptions +from bson.objectid import ObjectId +from pymongo.errors import OperationFailure +from pymongo.write_concern import WriteConcern + +_IS_SYNC = False + + +class TestCommon(AsyncIntegrationTest): + async def test_uuid_representation(self): + coll = self.db.uuid + await coll.drop() + + # Test property + self.assertEqual(UuidRepresentation.UNSPECIFIED, coll.codec_options.uuid_representation) + + # Test basic query + uu = uuid.uuid4() + # Insert as binary subtype 3 + coll = self.db.get_collection("uuid", CodecOptions(uuid_representation=PYTHON_LEGACY)) + legacy_opts = coll.codec_options + await coll.insert_one({"uu": uu}) + self.assertEqual(uu, (await coll.find_one({"uu": uu}))["uu"]) # type: ignore + coll = self.db.get_collection("uuid", CodecOptions(uuid_representation=STANDARD)) + self.assertEqual(STANDARD, coll.codec_options.uuid_representation) + self.assertEqual(None, await coll.find_one({"uu": uu})) + uul = Binary.from_uuid(uu, PYTHON_LEGACY) + self.assertEqual(uul, (await coll.find_one({"uu": uul}))["uu"]) # type: ignore + + # Test count_documents + self.assertEqual(0, await coll.count_documents({"uu": uu})) + coll = self.db.get_collection("uuid", CodecOptions(uuid_representation=PYTHON_LEGACY)) + self.assertEqual(1, await coll.count_documents({"uu": uu})) + + # Test delete + coll = self.db.get_collection("uuid", CodecOptions(uuid_representation=STANDARD)) + await coll.delete_one({"uu": uu}) + self.assertEqual(1, await coll.count_documents({})) + coll = self.db.get_collection("uuid", CodecOptions(uuid_representation=PYTHON_LEGACY)) + await coll.delete_one({"uu": uu}) + self.assertEqual(0, await coll.count_documents({})) + + # Test update_one + await coll.insert_one({"_id": uu, "i": 1}) + coll = self.db.get_collection("uuid", CodecOptions(uuid_representation=STANDARD)) + await coll.update_one({"_id": uu}, {"$set": 
{"i": 2}}) + coll = self.db.get_collection("uuid", CodecOptions(uuid_representation=PYTHON_LEGACY)) + self.assertEqual(1, (await coll.find_one({"_id": uu}))["i"]) # type: ignore + await coll.update_one({"_id": uu}, {"$set": {"i": 2}}) + self.assertEqual(2, (await coll.find_one({"_id": uu}))["i"]) # type: ignore + + # Test Cursor.distinct + self.assertEqual([2], await coll.find({"_id": uu}).distinct("i")) + coll = self.db.get_collection("uuid", CodecOptions(uuid_representation=STANDARD)) + self.assertEqual([], await coll.find({"_id": uu}).distinct("i")) + + # Test findAndModify + self.assertEqual(None, await coll.find_one_and_update({"_id": uu}, {"$set": {"i": 5}})) + coll = self.db.get_collection("uuid", CodecOptions(uuid_representation=PYTHON_LEGACY)) + self.assertEqual(2, (await coll.find_one_and_update({"_id": uu}, {"$set": {"i": 5}}))["i"]) + self.assertEqual(5, (await coll.find_one({"_id": uu}))["i"]) # type: ignore + + # Test command + self.assertEqual( + 5, + ( + await self.db.command( + "findAndModify", + "uuid", + update={"$set": {"i": 6}}, + query={"_id": uu}, + codec_options=legacy_opts, + ) + )["value"]["i"], + ) + self.assertEqual( + 6, + ( + await self.db.command( + "findAndModify", + "uuid", + update={"$set": {"i": 7}}, + query={"_id": Binary.from_uuid(uu, PYTHON_LEGACY)}, + ) + )["value"]["i"], + ) + + async def test_write_concern(self): + c = await self.async_rs_or_single_client(connect=False) + self.assertEqual(WriteConcern(), c.write_concern) + + c = await self.async_rs_or_single_client(connect=False, w=2, wTimeoutMS=1000) + wc = WriteConcern(w=2, wtimeout=1000) + self.assertEqual(wc, c.write_concern) + + # Can we override back to the server default? + db = c.get_database("pymongo_test", write_concern=WriteConcern()) + self.assertEqual(db.write_concern, WriteConcern()) + + db = c.pymongo_test + self.assertEqual(wc, db.write_concern) + coll = db.test + self.assertEqual(wc, coll.write_concern) + + cwc = WriteConcern(j=True) + coll = db.get_collection("test", write_concern=cwc) + self.assertEqual(cwc, coll.write_concern) + self.assertEqual(wc, db.write_concern) + + async def test_mongo_client(self): + pair = await async_client_context.pair + m = await self.async_rs_or_single_client(w=0) + coll = m.pymongo_test.write_concern_test + await coll.drop() + doc = {"_id": ObjectId()} + await coll.insert_one(doc) + self.assertTrue(await coll.insert_one(doc)) + coll = coll.with_options(write_concern=WriteConcern(w=1)) + with self.assertRaises(OperationFailure): + await coll.insert_one(doc) + + m = await self.async_rs_or_single_client() + coll = m.pymongo_test.write_concern_test + new_coll = coll.with_options(write_concern=WriteConcern(w=0)) + self.assertTrue(await new_coll.insert_one(doc)) + with self.assertRaises(OperationFailure): + await coll.insert_one(doc) + + m = await self.async_rs_or_single_client( + f"mongodb://{pair}/", replicaSet=async_client_context.replica_set_name + ) + + coll = m.pymongo_test.write_concern_test + with self.assertRaises(OperationFailure): + await coll.insert_one(doc) + m = await self.async_rs_or_single_client( + f"mongodb://{pair}/?w=0", replicaSet=async_client_context.replica_set_name + ) + + coll = m.pymongo_test.write_concern_test + await coll.insert_one(doc) + + # Equality tests + direct = await connected(await self.async_single_client(w=0)) + direct2 = await connected( + await self.async_single_client(f"mongodb://{pair}/?w=0", **self.credentials) + ) + self.assertEqual(direct, direct2) + self.assertFalse(direct != direct2) + + async def 
test_validate_boolean(self): + await self.db.test.update_one({}, {"$set": {"total": 1}}, upsert=True) + with self.assertRaisesRegex( + TypeError, "upsert must be True or False, was: upsert={'upsert': True}" + ): + await self.db.test.update_one({}, {"$set": {"total": 1}}, {"upsert": True}) # type: ignore + + +if __name__ == "__main__": + unittest.main() diff --git a/test/test_common.py b/test/test_common.py index 3228dc97fb..e69b421c9f 100644 --- a/test/test_common.py +++ b/test/test_common.py @@ -28,10 +28,7 @@ from pymongo.errors import OperationFailure from pymongo.write_concern import WriteConcern - -@client_context.require_connection -def setUpModule(): - pass +_IS_SYNC = True class TestCommon(IntegrationTest): @@ -48,12 +45,12 @@ def test_uuid_representation(self): coll = self.db.get_collection("uuid", CodecOptions(uuid_representation=PYTHON_LEGACY)) legacy_opts = coll.codec_options coll.insert_one({"uu": uu}) - self.assertEqual(uu, coll.find_one({"uu": uu})["uu"]) # type: ignore + self.assertEqual(uu, (coll.find_one({"uu": uu}))["uu"]) # type: ignore coll = self.db.get_collection("uuid", CodecOptions(uuid_representation=STANDARD)) self.assertEqual(STANDARD, coll.codec_options.uuid_representation) self.assertEqual(None, coll.find_one({"uu": uu})) uul = Binary.from_uuid(uu, PYTHON_LEGACY) - self.assertEqual(uul, coll.find_one({"uu": uul})["uu"]) # type: ignore + self.assertEqual(uul, (coll.find_one({"uu": uul}))["uu"]) # type: ignore # Test count_documents self.assertEqual(0, coll.count_documents({"uu": uu})) @@ -73,9 +70,9 @@ def test_uuid_representation(self): coll = self.db.get_collection("uuid", CodecOptions(uuid_representation=STANDARD)) coll.update_one({"_id": uu}, {"$set": {"i": 2}}) coll = self.db.get_collection("uuid", CodecOptions(uuid_representation=PYTHON_LEGACY)) - self.assertEqual(1, coll.find_one({"_id": uu})["i"]) # type: ignore + self.assertEqual(1, (coll.find_one({"_id": uu}))["i"]) # type: ignore coll.update_one({"_id": uu}, {"$set": {"i": 2}}) - self.assertEqual(2, coll.find_one({"_id": uu})["i"]) # type: ignore + self.assertEqual(2, (coll.find_one({"_id": uu}))["i"]) # type: ignore # Test Cursor.distinct self.assertEqual([2], coll.find({"_id": uu}).distinct("i")) @@ -85,27 +82,31 @@ def test_uuid_representation(self): # Test findAndModify self.assertEqual(None, coll.find_one_and_update({"_id": uu}, {"$set": {"i": 5}})) coll = self.db.get_collection("uuid", CodecOptions(uuid_representation=PYTHON_LEGACY)) - self.assertEqual(2, coll.find_one_and_update({"_id": uu}, {"$set": {"i": 5}})["i"]) - self.assertEqual(5, coll.find_one({"_id": uu})["i"]) # type: ignore + self.assertEqual(2, (coll.find_one_and_update({"_id": uu}, {"$set": {"i": 5}}))["i"]) + self.assertEqual(5, (coll.find_one({"_id": uu}))["i"]) # type: ignore # Test command self.assertEqual( 5, - self.db.command( - "findAndModify", - "uuid", - update={"$set": {"i": 6}}, - query={"_id": uu}, - codec_options=legacy_opts, + ( + self.db.command( + "findAndModify", + "uuid", + update={"$set": {"i": 6}}, + query={"_id": uu}, + codec_options=legacy_opts, + ) )["value"]["i"], ) self.assertEqual( 6, - self.db.command( - "findAndModify", - "uuid", - update={"$set": {"i": 7}}, - query={"_id": Binary.from_uuid(uu, PYTHON_LEGACY)}, + ( + self.db.command( + "findAndModify", + "uuid", + update={"$set": {"i": 7}}, + query={"_id": Binary.from_uuid(uu, PYTHON_LEGACY)}, + ) )["value"]["i"], ) @@ -140,20 +141,23 @@ def test_mongo_client(self): coll.insert_one(doc) self.assertTrue(coll.insert_one(doc)) coll = 
coll.with_options(write_concern=WriteConcern(w=1)) - self.assertRaises(OperationFailure, coll.insert_one, doc) + with self.assertRaises(OperationFailure): + coll.insert_one(doc) m = self.rs_or_single_client() coll = m.pymongo_test.write_concern_test new_coll = coll.with_options(write_concern=WriteConcern(w=0)) self.assertTrue(new_coll.insert_one(doc)) - self.assertRaises(OperationFailure, coll.insert_one, doc) + with self.assertRaises(OperationFailure): + coll.insert_one(doc) m = self.rs_or_single_client( f"mongodb://{pair}/", replicaSet=client_context.replica_set_name ) coll = m.pymongo_test.write_concern_test - self.assertRaises(OperationFailure, coll.insert_one, doc) + with self.assertRaises(OperationFailure): + coll.insert_one(doc) m = self.rs_or_single_client( f"mongodb://{pair}/?w=0", replicaSet=client_context.replica_set_name ) diff --git a/tools/synchro.py b/tools/synchro.py index 5ce83cfbeb..48c7fc59fd 100644 --- a/tools/synchro.py +++ b/tools/synchro.py @@ -189,6 +189,7 @@ def async_only_test(f: str) -> bool: "test_client_bulk_write.py", "test_client_context.py", "test_collection.py", + "test_common.py", "test_connections_survive_primary_stepdown_spec.py", "test_cursor.py", "test_database.py", From c2338d879b1ccd37eb5d970c8f8be97674a9a252 Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Thu, 10 Oct 2024 16:38:25 -0400 Subject: [PATCH 019/182] PYTHON-4839 - Convert test.test_collation to async (#1911) --- test/asynchronous/test_collation.py | 290 ++++++++++++++++++++++++++++ test/test_collation.py | 11 +- tools/synchro.py | 1 + 3 files changed, 298 insertions(+), 4 deletions(-) create mode 100644 test/asynchronous/test_collation.py diff --git a/test/asynchronous/test_collation.py b/test/asynchronous/test_collation.py new file mode 100644 index 0000000000..be3ea22e42 --- /dev/null +++ b/test/asynchronous/test_collation.py @@ -0,0 +1,290 @@ +# Copyright 2016-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Test the collation module.""" +from __future__ import annotations + +import functools +import warnings +from test.asynchronous import AsyncIntegrationTest, async_client_context, unittest +from test.utils import EventListener +from typing import Any + +from pymongo.asynchronous.helpers import anext +from pymongo.collation import ( + Collation, + CollationAlternate, + CollationCaseFirst, + CollationMaxVariable, + CollationStrength, +) +from pymongo.errors import ConfigurationError +from pymongo.operations import ( + DeleteMany, + DeleteOne, + IndexModel, + ReplaceOne, + UpdateMany, + UpdateOne, +) +from pymongo.write_concern import WriteConcern + +_IS_SYNC = False + + +class TestCollationObject(unittest.TestCase): + def test_constructor(self): + self.assertRaises(TypeError, Collation, locale=42) + # Fill in a locale to test the other options. + _Collation = functools.partial(Collation, "en_US") + # No error. 
+ _Collation(caseFirst=CollationCaseFirst.UPPER) + self.assertRaises(TypeError, _Collation, caseLevel="true") + self.assertRaises(ValueError, _Collation, strength="six") + self.assertRaises(TypeError, _Collation, numericOrdering="true") + self.assertRaises(TypeError, _Collation, alternate=5) + self.assertRaises(TypeError, _Collation, maxVariable=2) + self.assertRaises(TypeError, _Collation, normalization="false") + self.assertRaises(TypeError, _Collation, backwards="true") + + # No errors. + Collation("en_US", future_option="bar", another_option=42) + collation = Collation( + "en_US", + caseLevel=True, + caseFirst=CollationCaseFirst.UPPER, + strength=CollationStrength.QUATERNARY, + numericOrdering=True, + alternate=CollationAlternate.SHIFTED, + maxVariable=CollationMaxVariable.SPACE, + normalization=True, + backwards=True, + ) + + self.assertEqual( + { + "locale": "en_US", + "caseLevel": True, + "caseFirst": "upper", + "strength": 4, + "numericOrdering": True, + "alternate": "shifted", + "maxVariable": "space", + "normalization": True, + "backwards": True, + }, + collation.document, + ) + + self.assertEqual( + {"locale": "en_US", "backwards": True}, Collation("en_US", backwards=True).document + ) + + +class TestCollation(AsyncIntegrationTest): + listener: EventListener + warn_context: Any + collation: Collation + + @classmethod + @async_client_context.require_connection + async def _setup_class(cls): + await super()._setup_class() + cls.listener = EventListener() + cls.client = await cls.unmanaged_async_rs_or_single_client(event_listeners=[cls.listener]) + cls.db = cls.client.pymongo_test + cls.collation = Collation("en_US") + cls.warn_context = warnings.catch_warnings() + cls.warn_context.__enter__() + warnings.simplefilter("ignore", DeprecationWarning) + + @classmethod + async def _tearDown_class(cls): + cls.warn_context.__exit__() + cls.warn_context = None + await cls.client.close() + await super()._tearDown_class() + + def tearDown(self): + self.listener.reset() + super().tearDown() + + def last_command_started(self): + return self.listener.started_events[-1].command + + def assertCollationInLastCommand(self): + self.assertEqual(self.collation.document, self.last_command_started()["collation"]) + + async def test_create_collection(self): + await self.db.test.drop() + await self.db.create_collection("test", collation=self.collation) + self.assertCollationInLastCommand() + + # Test passing collation as a dict as well. 
+ await self.db.test.drop() + self.listener.reset() + await self.db.create_collection("test", collation=self.collation.document) + self.assertCollationInLastCommand() + + def test_index_model(self): + model = IndexModel([("a", 1), ("b", -1)], collation=self.collation) + self.assertEqual(self.collation.document, model.document["collation"]) + + async def test_create_index(self): + await self.db.test.create_index("foo", collation=self.collation) + ci_cmd = self.listener.started_events[0].command + self.assertEqual(self.collation.document, ci_cmd["indexes"][0]["collation"]) + + async def test_aggregate(self): + await self.db.test.aggregate([{"$group": {"_id": 42}}], collation=self.collation) + self.assertCollationInLastCommand() + + async def test_count_documents(self): + await self.db.test.count_documents({}, collation=self.collation) + self.assertCollationInLastCommand() + + async def test_distinct(self): + await self.db.test.distinct("foo", collation=self.collation) + self.assertCollationInLastCommand() + + self.listener.reset() + await self.db.test.find(collation=self.collation).distinct("foo") + self.assertCollationInLastCommand() + + async def test_find_command(self): + await self.db.test.insert_one({"is this thing on?": True}) + self.listener.reset() + await anext(self.db.test.find(collation=self.collation)) + self.assertCollationInLastCommand() + + async def test_explain_command(self): + self.listener.reset() + await self.db.test.find(collation=self.collation).explain() + # The collation should be part of the explained command. + self.assertEqual( + self.collation.document, self.last_command_started()["explain"]["collation"] + ) + + async def test_delete(self): + await self.db.test.delete_one({"foo": 42}, collation=self.collation) + command = self.listener.started_events[0].command + self.assertEqual(self.collation.document, command["deletes"][0]["collation"]) + + self.listener.reset() + await self.db.test.delete_many({"foo": 42}, collation=self.collation) + command = self.listener.started_events[0].command + self.assertEqual(self.collation.document, command["deletes"][0]["collation"]) + + async def test_update(self): + await self.db.test.replace_one({"foo": 42}, {"foo": 43}, collation=self.collation) + command = self.listener.started_events[0].command + self.assertEqual(self.collation.document, command["updates"][0]["collation"]) + + self.listener.reset() + await self.db.test.update_one({"foo": 42}, {"$set": {"foo": 43}}, collation=self.collation) + command = self.listener.started_events[0].command + self.assertEqual(self.collation.document, command["updates"][0]["collation"]) + + self.listener.reset() + await self.db.test.update_many({"foo": 42}, {"$set": {"foo": 43}}, collation=self.collation) + command = self.listener.started_events[0].command + self.assertEqual(self.collation.document, command["updates"][0]["collation"]) + + async def test_find_and(self): + await self.db.test.find_one_and_delete({"foo": 42}, collation=self.collation) + self.assertCollationInLastCommand() + + self.listener.reset() + await self.db.test.find_one_and_update( + {"foo": 42}, {"$set": {"foo": 43}}, collation=self.collation + ) + self.assertCollationInLastCommand() + + self.listener.reset() + await self.db.test.find_one_and_replace({"foo": 42}, {"foo": 43}, collation=self.collation) + self.assertCollationInLastCommand() + + async def test_bulk_write(self): + await self.db.test.collection.bulk_write( + [ + DeleteOne({"noCollation": 42}), + DeleteMany({"noCollation": 42}), + DeleteOne({"foo": 42}, 
collation=self.collation), + DeleteMany({"foo": 42}, collation=self.collation), + ReplaceOne({"noCollation": 24}, {"bar": 42}), + UpdateOne({"noCollation": 84}, {"$set": {"bar": 10}}, upsert=True), + UpdateMany({"noCollation": 45}, {"$set": {"bar": 42}}), + ReplaceOne({"foo": 24}, {"foo": 42}, collation=self.collation), + UpdateOne( + {"foo": 84}, {"$set": {"foo": 10}}, upsert=True, collation=self.collation + ), + UpdateMany({"foo": 45}, {"$set": {"foo": 42}}, collation=self.collation), + ] + ) + + delete_cmd = self.listener.started_events[0].command + update_cmd = self.listener.started_events[1].command + + def check_ops(ops): + for op in ops: + if "noCollation" in op["q"]: + self.assertNotIn("collation", op) + else: + self.assertEqual(self.collation.document, op["collation"]) + + check_ops(delete_cmd["deletes"]) + check_ops(update_cmd["updates"]) + + async def test_indexes_same_keys_different_collations(self): + await self.db.test.drop() + usa_collation = Collation("en_US") + ja_collation = Collation("ja") + await self.db.test.create_indexes( + [ + IndexModel("fieldname", collation=usa_collation), + IndexModel("fieldname", name="japanese_version", collation=ja_collation), + IndexModel("fieldname", name="simple"), + ] + ) + indexes = await self.db.test.index_information() + self.assertEqual( + usa_collation.document["locale"], indexes["fieldname_1"]["collation"]["locale"] + ) + self.assertEqual( + ja_collation.document["locale"], indexes["japanese_version"]["collation"]["locale"] + ) + self.assertNotIn("collation", indexes["simple"]) + await self.db.test.drop_index("fieldname_1") + indexes = await self.db.test.index_information() + self.assertIn("japanese_version", indexes) + self.assertIn("simple", indexes) + self.assertNotIn("fieldname", indexes) + + async def test_unacknowledged_write(self): + unacknowledged = WriteConcern(w=0) + collection = self.db.get_collection("test", write_concern=unacknowledged) + with self.assertRaises(ConfigurationError): + await collection.update_one( + {"hello": "world"}, {"$set": {"hello": "moon"}}, collation=self.collation + ) + update_one = UpdateOne( + {"hello": "world"}, {"$set": {"hello": "moon"}}, collation=self.collation + ) + with self.assertRaises(ConfigurationError): + await collection.bulk_write([update_one]) + + async def test_cursor_collation(self): + await self.db.test.insert_one({"hello": "world"}) + await anext(self.db.test.find().collation(self.collation)) + self.assertCollationInLastCommand() diff --git a/test/test_collation.py b/test/test_collation.py index 19df25c1c0..e5c1c7eb11 100644 --- a/test/test_collation.py +++ b/test/test_collation.py @@ -37,8 +37,11 @@ UpdateMany, UpdateOne, ) +from pymongo.synchronous.helpers import next from pymongo.write_concern import WriteConcern +_IS_SYNC = True + class TestCollationObject(unittest.TestCase): def test_constructor(self): @@ -96,8 +99,8 @@ class TestCollation(IntegrationTest): @classmethod @client_context.require_connection - def setUpClass(cls): - super().setUpClass() + def _setup_class(cls): + super()._setup_class() cls.listener = EventListener() cls.client = cls.unmanaged_rs_or_single_client(event_listeners=[cls.listener]) cls.db = cls.client.pymongo_test @@ -107,11 +110,11 @@ def setUpClass(cls): warnings.simplefilter("ignore", DeprecationWarning) @classmethod - def tearDownClass(cls): + def _tearDown_class(cls): cls.warn_context.__exit__() cls.warn_context = None cls.client.close() - super().tearDownClass() + super()._tearDown_class() def tearDown(self): self.listener.reset() diff --git 
a/tools/synchro.py b/tools/synchro.py index 48c7fc59fd..0ec8985a05 100644 --- a/tools/synchro.py +++ b/tools/synchro.py @@ -188,6 +188,7 @@ def async_only_test(f: str) -> bool: "test_client.py", "test_client_bulk_write.py", "test_client_context.py", + "test_collation.py", "test_collection.py", "test_common.py", "test_connections_survive_primary_stepdown_spec.py", From 8118aea985f017457259bff78e64656232f08eb5 Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Fri, 11 Oct 2024 08:29:12 -0400 Subject: [PATCH 020/182] =?UTF-8?q?PYTHON-4844=20-=20Skip=20async=20test?= =?UTF-8?q?=5Fencryption.AsyncTestSpec.test=5Flegacy=5Fti=E2=80=A6=20(#191?= =?UTF-8?q?4)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- test/asynchronous/test_encryption.py | 5 +++++ test/test_encryption.py | 5 +++++ 2 files changed, 10 insertions(+) diff --git a/test/asynchronous/test_encryption.py b/test/asynchronous/test_encryption.py index c3f6223384..3e52fb9e1b 100644 --- a/test/asynchronous/test_encryption.py +++ b/test/asynchronous/test_encryption.py @@ -693,6 +693,11 @@ def maybe_skip_scenario(self, test): self.skipTest("PYTHON-3706 flaky test on Windows/macOS") if "type=symbol" in desc: self.skipTest("PyMongo does not support the symbol type") + if ( + "timeoutms applied to listcollections to get collection schema" in desc + and not _IS_SYNC + ): + self.skipTest("PYTHON-4844 flaky test on async") def setup_scenario(self, scenario_def): """Override a test's setup.""" diff --git a/test/test_encryption.py b/test/test_encryption.py index 43c85e2c5b..64aa7ebf50 100644 --- a/test/test_encryption.py +++ b/test/test_encryption.py @@ -691,6 +691,11 @@ def maybe_skip_scenario(self, test): self.skipTest("PYTHON-3706 flaky test on Windows/macOS") if "type=symbol" in desc: self.skipTest("PyMongo does not support the symbol type") + if ( + "timeoutms applied to listcollections to get collection schema" in desc + and not _IS_SYNC + ): + self.skipTest("PYTHON-4844 flaky test on async") def setup_scenario(self, scenario_def): """Override a test's setup.""" From 3a662291e010cbed832c00aff8ffe7b43d470489 Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Fri, 11 Oct 2024 10:48:24 -0400 Subject: [PATCH 021/182] PYTHON-4700 - Convert CSFLE tests to async (#1907) --- .evergreen/run-tests.sh | 4 +- pymongo/asynchronous/encryption.py | 12 +- pymongo/network_layer.py | 27 +- pymongo/synchronous/encryption.py | 12 +- test/__init__.py | 11 +- test/asynchronous/__init__.py | 11 +- test/asynchronous/test_encryption.py | 257 +++++++++--------- test/asynchronous/utils_spec_runner.py | 172 +++++++++++- .../spec/legacy/timeoutMS.json | 4 +- test/test_connection_monitoring.py | 3 +- test/test_encryption.py | 255 +++++++++-------- test/test_server_selection_in_window.py | 2 +- test/utils.py | 147 ---------- test/utils_spec_runner.py | 170 +++++++++++- tools/synchro.py | 2 + 15 files changed, 655 insertions(+), 434 deletions(-) diff --git a/.evergreen/run-tests.sh b/.evergreen/run-tests.sh index 8d7a9f082a..5e8429dd28 100755 --- a/.evergreen/run-tests.sh +++ b/.evergreen/run-tests.sh @@ -257,9 +257,9 @@ if [ -z "$GREEN_FRAMEWORK" ]; then # Use --capture=tee-sys so pytest prints test output inline: # https://docs.pytest.org/en/stable/how-to/capture-stdout-stderr.html if [ -z "$TEST_SUITES" ]; then - python -m pytest -v --capture=tee-sys --durations=5 --maxfail=10 $TEST_ARGS + python -m pytest -v --capture=tee-sys --durations=5 $TEST_ARGS else - python -m pytest -v --capture=tee-sys --durations=5 --maxfail=10 -m 
$TEST_SUITES $TEST_ARGS + python -m pytest -v --capture=tee-sys --durations=5 -m $TEST_SUITES $TEST_ARGS fi else python green_framework_test.py $GREEN_FRAMEWORK -v $TEST_ARGS diff --git a/pymongo/asynchronous/encryption.py b/pymongo/asynchronous/encryption.py index 9b00c13e10..735e543047 100644 --- a/pymongo/asynchronous/encryption.py +++ b/pymongo/asynchronous/encryption.py @@ -180,10 +180,20 @@ async def kms_request(self, kms_context: MongoCryptKmsContext) -> None: while kms_context.bytes_needed > 0: # CSOT: update timeout. conn.settimeout(max(_csot.clamp_remaining(_KMS_CONNECT_TIMEOUT), 0)) - data = conn.recv(kms_context.bytes_needed) + if _IS_SYNC: + data = conn.recv(kms_context.bytes_needed) + else: + from pymongo.network_layer import ( # type: ignore[attr-defined] + async_receive_data_socket, + ) + + data = await async_receive_data_socket(conn, kms_context.bytes_needed) if not data: raise OSError("KMS connection closed") kms_context.feed(data) + # Async raises an OSError instead of returning empty bytes + except OSError as err: + raise OSError("KMS connection closed") from err except BLOCKING_IO_ERRORS: raise socket.timeout("timed out") from None finally: diff --git a/pymongo/network_layer.py b/pymongo/network_layer.py index 4b57620d83..d14a21f41d 100644 --- a/pymongo/network_layer.py +++ b/pymongo/network_layer.py @@ -130,7 +130,7 @@ def _is_ready(fut: Future) -> None: loop.remove_writer(fd) async def _async_receive_ssl( - conn: _sslConn, length: int, loop: AbstractEventLoop + conn: _sslConn, length: int, loop: AbstractEventLoop, once: Optional[bool] = False ) -> memoryview: mv = memoryview(bytearray(length)) total_read = 0 @@ -145,6 +145,9 @@ def _is_ready(fut: Future) -> None: read = conn.recv_into(mv[total_read:]) if read == 0: raise OSError("connection closed") + # KMS responses update their expected size after the first batch, stop reading after one loop + if once: + return mv[:read] total_read += read except BLOCKING_IO_ERRORS as exc: fd = conn.fileno() @@ -275,6 +278,28 @@ async def async_receive_data( sock.settimeout(sock_timeout) +async def async_receive_data_socket( + sock: Union[socket.socket, _sslConn], length: int +) -> memoryview: + sock_timeout = sock.gettimeout() + timeout = sock_timeout + + sock.settimeout(0.0) + loop = asyncio.get_event_loop() + try: + if _HAVE_SSL and isinstance(sock, (SSLSocket, _sslConn)): + return await asyncio.wait_for( + _async_receive_ssl(sock, length, loop, once=True), # type: ignore[arg-type] + timeout=timeout, + ) + else: + return await asyncio.wait_for(_async_receive(sock, length, loop), timeout=timeout) # type: ignore[arg-type] + except asyncio.TimeoutError as err: + raise socket.timeout("timed out") from err + finally: + sock.settimeout(sock_timeout) + + async def _async_receive(conn: socket.socket, length: int, loop: AbstractEventLoop) -> memoryview: mv = memoryview(bytearray(length)) bytes_read = 0 diff --git a/pymongo/synchronous/encryption.py b/pymongo/synchronous/encryption.py index efef6df9e8..506ff8bcba 100644 --- a/pymongo/synchronous/encryption.py +++ b/pymongo/synchronous/encryption.py @@ -180,10 +180,20 @@ def kms_request(self, kms_context: MongoCryptKmsContext) -> None: while kms_context.bytes_needed > 0: # CSOT: update timeout. 
conn.settimeout(max(_csot.clamp_remaining(_KMS_CONNECT_TIMEOUT), 0)) - data = conn.recv(kms_context.bytes_needed) + if _IS_SYNC: + data = conn.recv(kms_context.bytes_needed) + else: + from pymongo.network_layer import ( # type: ignore[attr-defined] + receive_data_socket, + ) + + data = receive_data_socket(conn, kms_context.bytes_needed) if not data: raise OSError("KMS connection closed") kms_context.feed(data) + # Async raises an OSError instead of returning empty bytes + except OSError as err: + raise OSError("KMS connection closed") from err except BLOCKING_IO_ERRORS: raise socket.timeout("timed out") from None finally: diff --git a/test/__init__.py b/test/__init__.py index af12bc032a..fd33fde293 100644 --- a/test/__init__.py +++ b/test/__init__.py @@ -464,11 +464,12 @@ def wrap(*args, **kwargs): if not self.connected: pair = self.pair raise SkipTest(f"Cannot connect to MongoDB on {pair}") - if iscoroutinefunction(condition) and condition(): - if wraps_async: - return f(*args, **kwargs) - else: - return f(*args, **kwargs) + if iscoroutinefunction(condition): + if condition(): + if wraps_async: + return f(*args, **kwargs) + else: + return f(*args, **kwargs) elif condition(): if wraps_async: return f(*args, **kwargs) diff --git a/test/asynchronous/__init__.py b/test/asynchronous/__init__.py index 2a44785b2f..0579828c49 100644 --- a/test/asynchronous/__init__.py +++ b/test/asynchronous/__init__.py @@ -466,11 +466,12 @@ async def wrap(*args, **kwargs): if not self.connected: pair = await self.pair raise SkipTest(f"Cannot connect to MongoDB on {pair}") - if iscoroutinefunction(condition) and await condition(): - if wraps_async: - return await f(*args, **kwargs) - else: - return f(*args, **kwargs) + if iscoroutinefunction(condition): + if await condition(): + if wraps_async: + return await f(*args, **kwargs) + else: + return f(*args, **kwargs) elif condition(): if wraps_async: return await f(*args, **kwargs) diff --git a/test/asynchronous/test_encryption.py b/test/asynchronous/test_encryption.py index 3e52fb9e1b..88b005c4b3 100644 --- a/test/asynchronous/test_encryption.py +++ b/test/asynchronous/test_encryption.py @@ -30,6 +30,7 @@ import warnings from test.asynchronous import AsyncIntegrationTest, AsyncPyMongoTestCase, async_client_context from test.asynchronous.test_bulk import AsyncBulkTestBase +from test.asynchronous.utils_spec_runner import AsyncSpecRunner, AsyncSpecTestCreator from threading import Thread from typing import Any, Dict, Mapping, Optional @@ -59,7 +60,6 @@ from test.utils import ( AllowListEventListener, OvertCommandListener, - SpecTestCreator, TopologyEventListener, async_wait_until, camel_to_snake_args, @@ -626,137 +626,132 @@ async def test_with_statement(self): KMS_TLS_OPTS = {"kmip": {"tlsCAFile": CA_PEM, "tlsCertificateKeyFile": CLIENT_PEM}} -if _IS_SYNC: - # TODO: Add asynchronous SpecRunner (https://jira.mongodb.org/browse/PYTHON-4700) - class TestSpec(AsyncSpecRunner): - @classmethod - @unittest.skipUnless(_HAVE_PYMONGOCRYPT, "pymongocrypt is not installed") - def setUpClass(cls): - super().setUpClass() - - def parse_auto_encrypt_opts(self, opts): - """Parse clientOptions.autoEncryptOpts.""" - opts = camel_to_snake_args(opts) - kms_providers = opts["kms_providers"] - if "aws" in kms_providers: - kms_providers["aws"] = AWS_CREDS - if not any(AWS_CREDS.values()): - self.skipTest("AWS environment credentials are not set") - if "awsTemporary" in kms_providers: - kms_providers["aws"] = AWS_TEMP_CREDS - del kms_providers["awsTemporary"] - if not 
any(AWS_TEMP_CREDS.values()): - self.skipTest("AWS Temp environment credentials are not set") - if "awsTemporaryNoSessionToken" in kms_providers: - kms_providers["aws"] = AWS_TEMP_NO_SESSION_CREDS - del kms_providers["awsTemporaryNoSessionToken"] - if not any(AWS_TEMP_NO_SESSION_CREDS.values()): - self.skipTest("AWS Temp environment credentials are not set") - if "azure" in kms_providers: - kms_providers["azure"] = AZURE_CREDS - if not any(AZURE_CREDS.values()): - self.skipTest("Azure environment credentials are not set") - if "gcp" in kms_providers: - kms_providers["gcp"] = GCP_CREDS - if not any(AZURE_CREDS.values()): - self.skipTest("GCP environment credentials are not set") - if "kmip" in kms_providers: - kms_providers["kmip"] = KMIP_CREDS - opts["kms_tls_options"] = KMS_TLS_OPTS - if "key_vault_namespace" not in opts: - opts["key_vault_namespace"] = "keyvault.datakeys" - if "extra_options" in opts: - opts.update(camel_to_snake_args(opts.pop("extra_options"))) - - opts = dict(opts) - return AutoEncryptionOpts(**opts) - - def parse_client_options(self, opts): - """Override clientOptions parsing to support autoEncryptOpts.""" - encrypt_opts = opts.pop("autoEncryptOpts", None) - if encrypt_opts: - opts["auto_encryption_opts"] = self.parse_auto_encrypt_opts(encrypt_opts) - - return super().parse_client_options(opts) - - def get_object_name(self, op): - """Default object is collection.""" - return op.get("object", "collection") - - def maybe_skip_scenario(self, test): - super().maybe_skip_scenario(test) - desc = test["description"].lower() - if ( - "timeoutms applied to listcollections to get collection schema" in desc - and sys.platform in ("win32", "darwin") - ): - self.skipTest("PYTHON-3706 flaky test on Windows/macOS") - if "type=symbol" in desc: - self.skipTest("PyMongo does not support the symbol type") - if ( - "timeoutms applied to listcollections to get collection schema" in desc - and not _IS_SYNC - ): - self.skipTest("PYTHON-4844 flaky test on async") - - def setup_scenario(self, scenario_def): - """Override a test's setup.""" - key_vault_data = scenario_def["key_vault_data"] - encrypted_fields = scenario_def["encrypted_fields"] - json_schema = scenario_def["json_schema"] - data = scenario_def["data"] - coll = async_client_context.client.get_database("keyvault", codec_options=OPTS)[ - "datakeys" - ] - coll.delete_many({}) - if key_vault_data: - coll.insert_many(key_vault_data) - - db_name = self.get_scenario_db_name(scenario_def) - coll_name = self.get_scenario_coll_name(scenario_def) - db = async_client_context.client.get_database(db_name, codec_options=OPTS) - coll = db.drop_collection(coll_name, encrypted_fields=encrypted_fields) - wc = WriteConcern(w="majority") - kwargs: Dict[str, Any] = {} - if json_schema: - kwargs["validator"] = {"$jsonSchema": json_schema} - kwargs["codec_options"] = OPTS - if not data: - kwargs["write_concern"] = wc - if encrypted_fields: - kwargs["encryptedFields"] = encrypted_fields - db.create_collection(coll_name, **kwargs) - coll = db[coll_name] - if data: - # Load data. - coll.with_options(write_concern=wc).insert_many(scenario_def["data"]) - - def allowable_errors(self, op): - """Override expected error classes.""" - errors = super().allowable_errors(op) - # An updateOne test expects encryption to error when no $ operator - # appears but pymongo raises a client side ValueError in this case. 
- if op["name"] == "updateOne": - errors += (ValueError,) - return errors - - def create_test(scenario_def, test, name): - @async_client_context.require_test_commands - def run_scenario(self): - self.run_scenario(scenario_def, test) - - return run_scenario - - test_creator = SpecTestCreator(create_test, TestSpec, os.path.join(SPEC_PATH, "legacy")) - test_creator.create_tests() - - if _HAVE_PYMONGOCRYPT: - globals().update( - generate_test_classes( - os.path.join(SPEC_PATH, "unified"), - module=__name__, - ) +class AsyncTestSpec(AsyncSpecRunner): + @classmethod + @unittest.skipUnless(_HAVE_PYMONGOCRYPT, "pymongocrypt is not installed") + async def _setup_class(cls): + await super()._setup_class() + + def parse_auto_encrypt_opts(self, opts): + """Parse clientOptions.autoEncryptOpts.""" + opts = camel_to_snake_args(opts) + kms_providers = opts["kms_providers"] + if "aws" in kms_providers: + kms_providers["aws"] = AWS_CREDS + if not any(AWS_CREDS.values()): + self.skipTest("AWS environment credentials are not set") + if "awsTemporary" in kms_providers: + kms_providers["aws"] = AWS_TEMP_CREDS + del kms_providers["awsTemporary"] + if not any(AWS_TEMP_CREDS.values()): + self.skipTest("AWS Temp environment credentials are not set") + if "awsTemporaryNoSessionToken" in kms_providers: + kms_providers["aws"] = AWS_TEMP_NO_SESSION_CREDS + del kms_providers["awsTemporaryNoSessionToken"] + if not any(AWS_TEMP_NO_SESSION_CREDS.values()): + self.skipTest("AWS Temp environment credentials are not set") + if "azure" in kms_providers: + kms_providers["azure"] = AZURE_CREDS + if not any(AZURE_CREDS.values()): + self.skipTest("Azure environment credentials are not set") + if "gcp" in kms_providers: + kms_providers["gcp"] = GCP_CREDS + if not any(AZURE_CREDS.values()): + self.skipTest("GCP environment credentials are not set") + if "kmip" in kms_providers: + kms_providers["kmip"] = KMIP_CREDS + opts["kms_tls_options"] = KMS_TLS_OPTS + if "key_vault_namespace" not in opts: + opts["key_vault_namespace"] = "keyvault.datakeys" + if "extra_options" in opts: + opts.update(camel_to_snake_args(opts.pop("extra_options"))) + + opts = dict(opts) + return AutoEncryptionOpts(**opts) + + def parse_client_options(self, opts): + """Override clientOptions parsing to support autoEncryptOpts.""" + encrypt_opts = opts.pop("autoEncryptOpts", None) + if encrypt_opts: + opts["auto_encryption_opts"] = self.parse_auto_encrypt_opts(encrypt_opts) + + return super().parse_client_options(opts) + + def get_object_name(self, op): + """Default object is collection.""" + return op.get("object", "collection") + + def maybe_skip_scenario(self, test): + super().maybe_skip_scenario(test) + desc = test["description"].lower() + if ( + "timeoutms applied to listcollections to get collection schema" in desc + and sys.platform in ("win32", "darwin") + ): + self.skipTest("PYTHON-3706 flaky test on Windows/macOS") + if "type=symbol" in desc: + self.skipTest("PyMongo does not support the symbol type") + if "timeoutms applied to listcollections to get collection schema" in desc and not _IS_SYNC: + self.skipTest("PYTHON-4844 flaky test on async") + + async def setup_scenario(self, scenario_def): + """Override a test's setup.""" + key_vault_data = scenario_def["key_vault_data"] + encrypted_fields = scenario_def["encrypted_fields"] + json_schema = scenario_def["json_schema"] + data = scenario_def["data"] + coll = async_client_context.client.get_database("keyvault", codec_options=OPTS)["datakeys"] + await coll.delete_many({}) + if key_vault_data: + await 
coll.insert_many(key_vault_data) + + db_name = self.get_scenario_db_name(scenario_def) + coll_name = self.get_scenario_coll_name(scenario_def) + db = async_client_context.client.get_database(db_name, codec_options=OPTS) + await db.drop_collection(coll_name, encrypted_fields=encrypted_fields) + wc = WriteConcern(w="majority") + kwargs: Dict[str, Any] = {} + if json_schema: + kwargs["validator"] = {"$jsonSchema": json_schema} + kwargs["codec_options"] = OPTS + if not data: + kwargs["write_concern"] = wc + if encrypted_fields: + kwargs["encryptedFields"] = encrypted_fields + await db.create_collection(coll_name, **kwargs) + coll = db[coll_name] + if data: + # Load data. + await coll.with_options(write_concern=wc).insert_many(scenario_def["data"]) + + def allowable_errors(self, op): + """Override expected error classes.""" + errors = super().allowable_errors(op) + # An updateOne test expects encryption to error when no $ operator + # appears but pymongo raises a client side ValueError in this case. + if op["name"] == "updateOne": + errors += (ValueError,) + return errors + + +async def create_test(scenario_def, test, name): + @async_client_context.require_test_commands + async def run_scenario(self): + await self.run_scenario(scenario_def, test) + + return run_scenario + + +test_creator = AsyncSpecTestCreator(create_test, AsyncTestSpec, os.path.join(SPEC_PATH, "legacy")) +test_creator.create_tests() + +if _HAVE_PYMONGOCRYPT: + globals().update( + generate_test_classes( + os.path.join(SPEC_PATH, "unified"), + module=__name__, ) + ) # Prose Tests ALL_KMS_PROVIDERS = { diff --git a/test/asynchronous/utils_spec_runner.py b/test/asynchronous/utils_spec_runner.py index 12cb13c2cd..4d9c4c8f20 100644 --- a/test/asynchronous/utils_spec_runner.py +++ b/test/asynchronous/utils_spec_runner.py @@ -15,8 +15,12 @@ """Utilities for testing driver specs.""" from __future__ import annotations +import asyncio import functools +import os import threading +import unittest +from asyncio import iscoroutinefunction from collections import abc from test.asynchronous import AsyncIntegrationTest, async_client_context, client_knobs from test.utils import ( @@ -24,6 +28,7 @@ CompareType, EventListener, OvertCommandListener, + ScenarioDict, ServerAndTopologyEventListener, camel_to_snake, camel_to_snake_args, @@ -32,11 +37,12 @@ ) from typing import List -from bson import ObjectId, decode, encode +from bson import ObjectId, decode, encode, json_util from bson.binary import Binary from bson.int64 import Int64 from bson.son import SON from gridfs import GridFSBucket +from gridfs.asynchronous.grid_file import AsyncGridFSBucket from pymongo.asynchronous import client_session from pymongo.asynchronous.command_cursor import AsyncCommandCursor from pymongo.asynchronous.cursor import AsyncCursor @@ -83,6 +89,161 @@ def run(self): self.stop() +class AsyncSpecTestCreator: + """Class to create test cases from specifications.""" + + def __init__(self, create_test, test_class, test_path): + """Create a TestCreator object. + + :Parameters: + - `create_test`: callback that returns a test case. The callback + must accept the following arguments - a dictionary containing the + entire test specification (the `scenario_def`), a dictionary + containing the specification for which the test case will be + generated (the `test_def`). + - `test_class`: the unittest.TestCase class in which to create the + test case. + - `test_path`: path to the directory containing the JSON files with + the test specifications. 
+ """ + self._create_test = create_test + self._test_class = test_class + self.test_path = test_path + + def _ensure_min_max_server_version(self, scenario_def, method): + """Test modifier that enforces a version range for the server on a + test case. + """ + if "minServerVersion" in scenario_def: + min_ver = tuple(int(elt) for elt in scenario_def["minServerVersion"].split(".")) + if min_ver is not None: + method = async_client_context.require_version_min(*min_ver)(method) + + if "maxServerVersion" in scenario_def: + max_ver = tuple(int(elt) for elt in scenario_def["maxServerVersion"].split(".")) + if max_ver is not None: + method = async_client_context.require_version_max(*max_ver)(method) + + if "serverless" in scenario_def: + serverless = scenario_def["serverless"] + if serverless == "require": + serverless_satisfied = async_client_context.serverless + elif serverless == "forbid": + serverless_satisfied = not async_client_context.serverless + else: # unset or "allow" + serverless_satisfied = True + method = unittest.skipUnless( + serverless_satisfied, "Serverless requirement not satisfied" + )(method) + + return method + + @staticmethod + async def valid_topology(run_on_req): + return await async_client_context.is_topology_type( + run_on_req.get("topology", ["single", "replicaset", "sharded", "load-balanced"]) + ) + + @staticmethod + def min_server_version(run_on_req): + version = run_on_req.get("minServerVersion") + if version: + min_ver = tuple(int(elt) for elt in version.split(".")) + return async_client_context.version >= min_ver + return True + + @staticmethod + def max_server_version(run_on_req): + version = run_on_req.get("maxServerVersion") + if version: + max_ver = tuple(int(elt) for elt in version.split(".")) + return async_client_context.version <= max_ver + return True + + @staticmethod + def valid_auth_enabled(run_on_req): + if "authEnabled" in run_on_req: + if run_on_req["authEnabled"]: + return async_client_context.auth_enabled + return not async_client_context.auth_enabled + return True + + @staticmethod + def serverless_ok(run_on_req): + serverless = run_on_req["serverless"] + if serverless == "require": + return async_client_context.serverless + elif serverless == "forbid": + return not async_client_context.serverless + else: # unset or "allow" + return True + + async def should_run_on(self, scenario_def): + run_on = scenario_def.get("runOn", []) + if not run_on: + # Always run these tests. + return True + + for req in run_on: + if ( + await self.valid_topology(req) + and self.min_server_version(req) + and self.max_server_version(req) + and self.valid_auth_enabled(req) + and self.serverless_ok(req) + ): + return True + return False + + def ensure_run_on(self, scenario_def, method): + """Test modifier that enforces a 'runOn' on a test case.""" + + async def predicate(): + return await self.should_run_on(scenario_def) + + return async_client_context._require(predicate, "runOn not satisfied", method) + + def tests(self, scenario_def): + """Allow CMAP spec test to override the location of test.""" + return scenario_def["tests"] + + async def _create_tests(self): + for dirpath, _, filenames in os.walk(self.test_path): + dirname = os.path.split(dirpath)[-1] + + for filename in filenames: + with open(os.path.join(dirpath, filename)) as scenario_stream: # noqa: ASYNC101, RUF100 + # Use tz_aware=False to match how CodecOptions decodes + # dates. 
+ opts = json_util.JSONOptions(tz_aware=False) + scenario_def = ScenarioDict( + json_util.loads(scenario_stream.read(), json_options=opts) + ) + + test_type = os.path.splitext(filename)[0] + + # Construct test from scenario. + for test_def in self.tests(scenario_def): + test_name = "test_{}_{}_{}".format( + dirname, + test_type.replace("-", "_").replace(".", "_"), + str(test_def["description"].replace(" ", "_").replace(".", "_")), + ) + + new_test = await self._create_test(scenario_def, test_def, test_name) + new_test = self._ensure_min_max_server_version(scenario_def, new_test) + new_test = self.ensure_run_on(scenario_def, new_test) + + new_test.__name__ = test_name + setattr(self._test_class, new_test.__name__, new_test) + + def create_tests(self): + if _IS_SYNC: + self._create_tests() + else: + asyncio.run(self._create_tests()) + + class AsyncSpecRunner(AsyncIntegrationTest): mongos_clients: List knobs: client_knobs @@ -284,7 +445,7 @@ async def run_operation(self, sessions, collection, operation): if object_name == "gridfsbucket": # Only create the GridFSBucket when we need it (for the gridfs # retryable reads tests). - obj = GridFSBucket(database, bucket_name=collection.name) + obj = AsyncGridFSBucket(database, bucket_name=collection.name) else: objects = { "client": database.client, @@ -312,7 +473,10 @@ async def run_operation(self, sessions, collection, operation): args.update(arguments) arguments = args - result = cmd(**dict(arguments)) + if not _IS_SYNC and iscoroutinefunction(cmd): + result = await cmd(**dict(arguments)) + else: + result = cmd(**dict(arguments)) # Cleanup open change stream cursors. if name == "watch": self.addAsyncCleanup(result.close) @@ -588,7 +752,7 @@ async def run_scenario(self, scenario_def, test): read_preference=ReadPreference.PRIMARY, read_concern=ReadConcern("local"), ) - actual_data = await (await outcome_coll.find(sort=[("_id", 1)])).to_list() + actual_data = await outcome_coll.find(sort=[("_id", 1)]).to_list() # The expected data needs to be the left hand side here otherwise # CompareType(Binary) doesn't work. 
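For readers following the sync/async conversion above: run_operation in utils_spec_runner now awaits the resolved command only when asyncio.iscoroutinefunction reports a coroutine function, so one dispatch path serves both the synchronous and asynchronous spec runners. A minimal standalone sketch of that dispatch pattern follows; the helper names (make_result, make_result_async, run_dispatch) are illustrative only and are not part of this patch series.

    import asyncio
    from asyncio import iscoroutinefunction

    def make_result(value):
        # Plain callable: its return value is used directly.
        return {"ok": value}

    async def make_result_async(value):
        # Coroutine function: must be awaited to obtain the result.
        return {"ok": value}

    async def run_dispatch(cmd, value):
        # Await only when the resolved callable is a coroutine function,
        # mirroring the iscoroutinefunction check added to run_operation.
        if iscoroutinefunction(cmd):
            return await cmd(value)
        return cmd(value)

    assert asyncio.run(run_dispatch(make_result, 1)) == {"ok": 1}
    assert asyncio.run(run_dispatch(make_result_async, 2)) == {"ok": 2}
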
diff --git a/test/client-side-encryption/spec/legacy/timeoutMS.json b/test/client-side-encryption/spec/legacy/timeoutMS.json index b667767cfc..8411306224 100644 --- a/test/client-side-encryption/spec/legacy/timeoutMS.json +++ b/test/client-side-encryption/spec/legacy/timeoutMS.json @@ -110,7 +110,7 @@ "listCollections" ], "blockConnection": true, - "blockTimeMS": 60 + "blockTimeMS": 600 } }, "clientOptions": { @@ -119,7 +119,7 @@ "aws": {} } }, - "timeoutMS": 50 + "timeoutMS": 500 }, "operations": [ { diff --git a/test/test_connection_monitoring.py b/test/test_connection_monitoring.py index 142af0f9a7..d576a1184a 100644 --- a/test/test_connection_monitoring.py +++ b/test/test_connection_monitoring.py @@ -25,14 +25,13 @@ from test.pymongo_mocks import DummyMonitor from test.utils import ( CMAPListener, - SpecTestCreator, camel_to_snake, client_context, get_pool, get_pools, wait_until, ) -from test.utils_spec_runner import SpecRunnerThread +from test.utils_spec_runner import SpecRunnerThread, SpecTestCreator from bson.objectid import ObjectId from bson.son import SON diff --git a/test/test_encryption.py b/test/test_encryption.py index 64aa7ebf50..13a69ca9ad 100644 --- a/test/test_encryption.py +++ b/test/test_encryption.py @@ -30,6 +30,7 @@ import warnings from test import IntegrationTest, PyMongoTestCase, client_context from test.test_bulk import BulkTestBase +from test.utils_spec_runner import SpecRunner, SpecTestCreator from threading import Thread from typing import Any, Dict, Mapping, Optional @@ -58,7 +59,6 @@ from test.utils import ( AllowListEventListener, OvertCommandListener, - SpecTestCreator, TopologyEventListener, camel_to_snake_args, is_greenthread_patched, @@ -624,135 +624,132 @@ def test_with_statement(self): KMS_TLS_OPTS = {"kmip": {"tlsCAFile": CA_PEM, "tlsCertificateKeyFile": CLIENT_PEM}} -if _IS_SYNC: - # TODO: Add synchronous SpecRunner (https://jira.mongodb.org/browse/PYTHON-4700) - class TestSpec(SpecRunner): - @classmethod - @unittest.skipUnless(_HAVE_PYMONGOCRYPT, "pymongocrypt is not installed") - def setUpClass(cls): - super().setUpClass() - - def parse_auto_encrypt_opts(self, opts): - """Parse clientOptions.autoEncryptOpts.""" - opts = camel_to_snake_args(opts) - kms_providers = opts["kms_providers"] - if "aws" in kms_providers: - kms_providers["aws"] = AWS_CREDS - if not any(AWS_CREDS.values()): - self.skipTest("AWS environment credentials are not set") - if "awsTemporary" in kms_providers: - kms_providers["aws"] = AWS_TEMP_CREDS - del kms_providers["awsTemporary"] - if not any(AWS_TEMP_CREDS.values()): - self.skipTest("AWS Temp environment credentials are not set") - if "awsTemporaryNoSessionToken" in kms_providers: - kms_providers["aws"] = AWS_TEMP_NO_SESSION_CREDS - del kms_providers["awsTemporaryNoSessionToken"] - if not any(AWS_TEMP_NO_SESSION_CREDS.values()): - self.skipTest("AWS Temp environment credentials are not set") - if "azure" in kms_providers: - kms_providers["azure"] = AZURE_CREDS - if not any(AZURE_CREDS.values()): - self.skipTest("Azure environment credentials are not set") - if "gcp" in kms_providers: - kms_providers["gcp"] = GCP_CREDS - if not any(AZURE_CREDS.values()): - self.skipTest("GCP environment credentials are not set") - if "kmip" in kms_providers: - kms_providers["kmip"] = KMIP_CREDS - opts["kms_tls_options"] = KMS_TLS_OPTS - if "key_vault_namespace" not in opts: - opts["key_vault_namespace"] = "keyvault.datakeys" - if "extra_options" in opts: - opts.update(camel_to_snake_args(opts.pop("extra_options"))) - - opts = dict(opts) - 
return AutoEncryptionOpts(**opts) - - def parse_client_options(self, opts): - """Override clientOptions parsing to support autoEncryptOpts.""" - encrypt_opts = opts.pop("autoEncryptOpts", None) - if encrypt_opts: - opts["auto_encryption_opts"] = self.parse_auto_encrypt_opts(encrypt_opts) - - return super().parse_client_options(opts) - - def get_object_name(self, op): - """Default object is collection.""" - return op.get("object", "collection") - - def maybe_skip_scenario(self, test): - super().maybe_skip_scenario(test) - desc = test["description"].lower() - if ( - "timeoutms applied to listcollections to get collection schema" in desc - and sys.platform in ("win32", "darwin") - ): - self.skipTest("PYTHON-3706 flaky test on Windows/macOS") - if "type=symbol" in desc: - self.skipTest("PyMongo does not support the symbol type") - if ( - "timeoutms applied to listcollections to get collection schema" in desc - and not _IS_SYNC - ): - self.skipTest("PYTHON-4844 flaky test on async") - - def setup_scenario(self, scenario_def): - """Override a test's setup.""" - key_vault_data = scenario_def["key_vault_data"] - encrypted_fields = scenario_def["encrypted_fields"] - json_schema = scenario_def["json_schema"] - data = scenario_def["data"] - coll = client_context.client.get_database("keyvault", codec_options=OPTS)["datakeys"] - coll.delete_many({}) - if key_vault_data: - coll.insert_many(key_vault_data) - - db_name = self.get_scenario_db_name(scenario_def) - coll_name = self.get_scenario_coll_name(scenario_def) - db = client_context.client.get_database(db_name, codec_options=OPTS) - coll = db.drop_collection(coll_name, encrypted_fields=encrypted_fields) - wc = WriteConcern(w="majority") - kwargs: Dict[str, Any] = {} - if json_schema: - kwargs["validator"] = {"$jsonSchema": json_schema} - kwargs["codec_options"] = OPTS - if not data: - kwargs["write_concern"] = wc - if encrypted_fields: - kwargs["encryptedFields"] = encrypted_fields - db.create_collection(coll_name, **kwargs) - coll = db[coll_name] - if data: - # Load data. - coll.with_options(write_concern=wc).insert_many(scenario_def["data"]) - - def allowable_errors(self, op): - """Override expected error classes.""" - errors = super().allowable_errors(op) - # An updateOne test expects encryption to error when no $ operator - # appears but pymongo raises a client side ValueError in this case. 
- if op["name"] == "updateOne": - errors += (ValueError,) - return errors - - def create_test(scenario_def, test, name): - @client_context.require_test_commands - def run_scenario(self): - self.run_scenario(scenario_def, test) - - return run_scenario - - test_creator = SpecTestCreator(create_test, TestSpec, os.path.join(SPEC_PATH, "legacy")) - test_creator.create_tests() - - if _HAVE_PYMONGOCRYPT: - globals().update( - generate_test_classes( - os.path.join(SPEC_PATH, "unified"), - module=__name__, - ) +class TestSpec(SpecRunner): + @classmethod + @unittest.skipUnless(_HAVE_PYMONGOCRYPT, "pymongocrypt is not installed") + def _setup_class(cls): + super()._setup_class() + + def parse_auto_encrypt_opts(self, opts): + """Parse clientOptions.autoEncryptOpts.""" + opts = camel_to_snake_args(opts) + kms_providers = opts["kms_providers"] + if "aws" in kms_providers: + kms_providers["aws"] = AWS_CREDS + if not any(AWS_CREDS.values()): + self.skipTest("AWS environment credentials are not set") + if "awsTemporary" in kms_providers: + kms_providers["aws"] = AWS_TEMP_CREDS + del kms_providers["awsTemporary"] + if not any(AWS_TEMP_CREDS.values()): + self.skipTest("AWS Temp environment credentials are not set") + if "awsTemporaryNoSessionToken" in kms_providers: + kms_providers["aws"] = AWS_TEMP_NO_SESSION_CREDS + del kms_providers["awsTemporaryNoSessionToken"] + if not any(AWS_TEMP_NO_SESSION_CREDS.values()): + self.skipTest("AWS Temp environment credentials are not set") + if "azure" in kms_providers: + kms_providers["azure"] = AZURE_CREDS + if not any(AZURE_CREDS.values()): + self.skipTest("Azure environment credentials are not set") + if "gcp" in kms_providers: + kms_providers["gcp"] = GCP_CREDS + if not any(AZURE_CREDS.values()): + self.skipTest("GCP environment credentials are not set") + if "kmip" in kms_providers: + kms_providers["kmip"] = KMIP_CREDS + opts["kms_tls_options"] = KMS_TLS_OPTS + if "key_vault_namespace" not in opts: + opts["key_vault_namespace"] = "keyvault.datakeys" + if "extra_options" in opts: + opts.update(camel_to_snake_args(opts.pop("extra_options"))) + + opts = dict(opts) + return AutoEncryptionOpts(**opts) + + def parse_client_options(self, opts): + """Override clientOptions parsing to support autoEncryptOpts.""" + encrypt_opts = opts.pop("autoEncryptOpts", None) + if encrypt_opts: + opts["auto_encryption_opts"] = self.parse_auto_encrypt_opts(encrypt_opts) + + return super().parse_client_options(opts) + + def get_object_name(self, op): + """Default object is collection.""" + return op.get("object", "collection") + + def maybe_skip_scenario(self, test): + super().maybe_skip_scenario(test) + desc = test["description"].lower() + if ( + "timeoutms applied to listcollections to get collection schema" in desc + and sys.platform in ("win32", "darwin") + ): + self.skipTest("PYTHON-3706 flaky test on Windows/macOS") + if "type=symbol" in desc: + self.skipTest("PyMongo does not support the symbol type") + if "timeoutms applied to listcollections to get collection schema" in desc and not _IS_SYNC: + self.skipTest("PYTHON-4844 flaky test on async") + + def setup_scenario(self, scenario_def): + """Override a test's setup.""" + key_vault_data = scenario_def["key_vault_data"] + encrypted_fields = scenario_def["encrypted_fields"] + json_schema = scenario_def["json_schema"] + data = scenario_def["data"] + coll = client_context.client.get_database("keyvault", codec_options=OPTS)["datakeys"] + coll.delete_many({}) + if key_vault_data: + coll.insert_many(key_vault_data) + + db_name = 
self.get_scenario_db_name(scenario_def) + coll_name = self.get_scenario_coll_name(scenario_def) + db = client_context.client.get_database(db_name, codec_options=OPTS) + db.drop_collection(coll_name, encrypted_fields=encrypted_fields) + wc = WriteConcern(w="majority") + kwargs: Dict[str, Any] = {} + if json_schema: + kwargs["validator"] = {"$jsonSchema": json_schema} + kwargs["codec_options"] = OPTS + if not data: + kwargs["write_concern"] = wc + if encrypted_fields: + kwargs["encryptedFields"] = encrypted_fields + db.create_collection(coll_name, **kwargs) + coll = db[coll_name] + if data: + # Load data. + coll.with_options(write_concern=wc).insert_many(scenario_def["data"]) + + def allowable_errors(self, op): + """Override expected error classes.""" + errors = super().allowable_errors(op) + # An updateOne test expects encryption to error when no $ operator + # appears but pymongo raises a client side ValueError in this case. + if op["name"] == "updateOne": + errors += (ValueError,) + return errors + + +def create_test(scenario_def, test, name): + @client_context.require_test_commands + def run_scenario(self): + self.run_scenario(scenario_def, test) + + return run_scenario + + +test_creator = SpecTestCreator(create_test, TestSpec, os.path.join(SPEC_PATH, "legacy")) +test_creator.create_tests() + +if _HAVE_PYMONGOCRYPT: + globals().update( + generate_test_classes( + os.path.join(SPEC_PATH, "unified"), + module=__name__, ) + ) # Prose Tests ALL_KMS_PROVIDERS = { diff --git a/test/test_server_selection_in_window.py b/test/test_server_selection_in_window.py index 7cab42cca2..05772fa385 100644 --- a/test/test_server_selection_in_window.py +++ b/test/test_server_selection_in_window.py @@ -21,11 +21,11 @@ from test.utils import ( CMAPListener, OvertCommandListener, - SpecTestCreator, get_pool, wait_until, ) from test.utils_selection_tests import create_topology +from test.utils_spec_runner import SpecTestCreator from pymongo.common import clean_node from pymongo.monitoring import ConnectionReadyEvent diff --git a/test/utils.py b/test/utils.py index 9c78cff3ad..4575a9fe10 100644 --- a/test/utils.py +++ b/test/utils.py @@ -418,153 +418,6 @@ def call_count(self): return len(self._call_list) -class SpecTestCreator: - """Class to create test cases from specifications.""" - - def __init__(self, create_test, test_class, test_path): - """Create a TestCreator object. - - :Parameters: - - `create_test`: callback that returns a test case. The callback - must accept the following arguments - a dictionary containing the - entire test specification (the `scenario_def`), a dictionary - containing the specification for which the test case will be - generated (the `test_def`). - - `test_class`: the unittest.TestCase class in which to create the - test case. - - `test_path`: path to the directory containing the JSON files with - the test specifications. - """ - self._create_test = create_test - self._test_class = test_class - self.test_path = test_path - - def _ensure_min_max_server_version(self, scenario_def, method): - """Test modifier that enforces a version range for the server on a - test case. 
- """ - if "minServerVersion" in scenario_def: - min_ver = tuple(int(elt) for elt in scenario_def["minServerVersion"].split(".")) - if min_ver is not None: - method = client_context.require_version_min(*min_ver)(method) - - if "maxServerVersion" in scenario_def: - max_ver = tuple(int(elt) for elt in scenario_def["maxServerVersion"].split(".")) - if max_ver is not None: - method = client_context.require_version_max(*max_ver)(method) - - if "serverless" in scenario_def: - serverless = scenario_def["serverless"] - if serverless == "require": - serverless_satisfied = client_context.serverless - elif serverless == "forbid": - serverless_satisfied = not client_context.serverless - else: # unset or "allow" - serverless_satisfied = True - method = unittest.skipUnless( - serverless_satisfied, "Serverless requirement not satisfied" - )(method) - - return method - - @staticmethod - def valid_topology(run_on_req): - return client_context.is_topology_type( - run_on_req.get("topology", ["single", "replicaset", "sharded", "load-balanced"]) - ) - - @staticmethod - def min_server_version(run_on_req): - version = run_on_req.get("minServerVersion") - if version: - min_ver = tuple(int(elt) for elt in version.split(".")) - return client_context.version >= min_ver - return True - - @staticmethod - def max_server_version(run_on_req): - version = run_on_req.get("maxServerVersion") - if version: - max_ver = tuple(int(elt) for elt in version.split(".")) - return client_context.version <= max_ver - return True - - @staticmethod - def valid_auth_enabled(run_on_req): - if "authEnabled" in run_on_req: - if run_on_req["authEnabled"]: - return client_context.auth_enabled - return not client_context.auth_enabled - return True - - @staticmethod - def serverless_ok(run_on_req): - serverless = run_on_req["serverless"] - if serverless == "require": - return client_context.serverless - elif serverless == "forbid": - return not client_context.serverless - else: # unset or "allow" - return True - - def should_run_on(self, scenario_def): - run_on = scenario_def.get("runOn", []) - if not run_on: - # Always run these tests. - return True - - for req in run_on: - if ( - self.valid_topology(req) - and self.min_server_version(req) - and self.max_server_version(req) - and self.valid_auth_enabled(req) - and self.serverless_ok(req) - ): - return True - return False - - def ensure_run_on(self, scenario_def, method): - """Test modifier that enforces a 'runOn' on a test case.""" - return client_context._require( - lambda: self.should_run_on(scenario_def), "runOn not satisfied", method - ) - - def tests(self, scenario_def): - """Allow CMAP spec test to override the location of test.""" - return scenario_def["tests"] - - def create_tests(self): - for dirpath, _, filenames in os.walk(self.test_path): - dirname = os.path.split(dirpath)[-1] - - for filename in filenames: - with open(os.path.join(dirpath, filename)) as scenario_stream: - # Use tz_aware=False to match how CodecOptions decodes - # dates. - opts = json_util.JSONOptions(tz_aware=False) - scenario_def = ScenarioDict( - json_util.loads(scenario_stream.read(), json_options=opts) - ) - - test_type = os.path.splitext(filename)[0] - - # Construct test from scenario. 
- for test_def in self.tests(scenario_def): - test_name = "test_{}_{}_{}".format( - dirname, - test_type.replace("-", "_").replace(".", "_"), - str(test_def["description"].replace(" ", "_").replace(".", "_")), - ) - - new_test = self._create_test(scenario_def, test_def, test_name) - new_test = self._ensure_min_max_server_version(scenario_def, new_test) - new_test = self.ensure_run_on(scenario_def, new_test) - - new_test.__name__ = test_name - setattr(self._test_class, new_test.__name__, new_test) - - def ensure_all_connected(client: MongoClient) -> None: """Ensure that the client's connection pool has socket connections to all members of a replica set. Raises ConfigurationError when called with a diff --git a/test/utils_spec_runner.py b/test/utils_spec_runner.py index 06a40351cd..8a061de0b1 100644 --- a/test/utils_spec_runner.py +++ b/test/utils_spec_runner.py @@ -15,8 +15,12 @@ """Utilities for testing driver specs.""" from __future__ import annotations +import asyncio import functools +import os import threading +import unittest +from asyncio import iscoroutinefunction from collections import abc from test import IntegrationTest, client_context, client_knobs from test.utils import ( @@ -24,6 +28,7 @@ CompareType, EventListener, OvertCommandListener, + ScenarioDict, ServerAndTopologyEventListener, camel_to_snake, camel_to_snake_args, @@ -32,11 +37,12 @@ ) from typing import List -from bson import ObjectId, decode, encode +from bson import ObjectId, decode, encode, json_util from bson.binary import Binary from bson.int64 import Int64 from bson.son import SON from gridfs import GridFSBucket +from gridfs.synchronous.grid_file import GridFSBucket from pymongo.errors import BulkWriteError, OperationFailure, PyMongoError from pymongo.read_concern import ReadConcern from pymongo.read_preferences import ReadPreference @@ -83,6 +89,161 @@ def run(self): self.stop() +class SpecTestCreator: + """Class to create test cases from specifications.""" + + def __init__(self, create_test, test_class, test_path): + """Create a TestCreator object. + + :Parameters: + - `create_test`: callback that returns a test case. The callback + must accept the following arguments - a dictionary containing the + entire test specification (the `scenario_def`), a dictionary + containing the specification for which the test case will be + generated (the `test_def`). + - `test_class`: the unittest.TestCase class in which to create the + test case. + - `test_path`: path to the directory containing the JSON files with + the test specifications. + """ + self._create_test = create_test + self._test_class = test_class + self.test_path = test_path + + def _ensure_min_max_server_version(self, scenario_def, method): + """Test modifier that enforces a version range for the server on a + test case. 
+ """ + if "minServerVersion" in scenario_def: + min_ver = tuple(int(elt) for elt in scenario_def["minServerVersion"].split(".")) + if min_ver is not None: + method = client_context.require_version_min(*min_ver)(method) + + if "maxServerVersion" in scenario_def: + max_ver = tuple(int(elt) for elt in scenario_def["maxServerVersion"].split(".")) + if max_ver is not None: + method = client_context.require_version_max(*max_ver)(method) + + if "serverless" in scenario_def: + serverless = scenario_def["serverless"] + if serverless == "require": + serverless_satisfied = client_context.serverless + elif serverless == "forbid": + serverless_satisfied = not client_context.serverless + else: # unset or "allow" + serverless_satisfied = True + method = unittest.skipUnless( + serverless_satisfied, "Serverless requirement not satisfied" + )(method) + + return method + + @staticmethod + def valid_topology(run_on_req): + return client_context.is_topology_type( + run_on_req.get("topology", ["single", "replicaset", "sharded", "load-balanced"]) + ) + + @staticmethod + def min_server_version(run_on_req): + version = run_on_req.get("minServerVersion") + if version: + min_ver = tuple(int(elt) for elt in version.split(".")) + return client_context.version >= min_ver + return True + + @staticmethod + def max_server_version(run_on_req): + version = run_on_req.get("maxServerVersion") + if version: + max_ver = tuple(int(elt) for elt in version.split(".")) + return client_context.version <= max_ver + return True + + @staticmethod + def valid_auth_enabled(run_on_req): + if "authEnabled" in run_on_req: + if run_on_req["authEnabled"]: + return client_context.auth_enabled + return not client_context.auth_enabled + return True + + @staticmethod + def serverless_ok(run_on_req): + serverless = run_on_req["serverless"] + if serverless == "require": + return client_context.serverless + elif serverless == "forbid": + return not client_context.serverless + else: # unset or "allow" + return True + + def should_run_on(self, scenario_def): + run_on = scenario_def.get("runOn", []) + if not run_on: + # Always run these tests. + return True + + for req in run_on: + if ( + self.valid_topology(req) + and self.min_server_version(req) + and self.max_server_version(req) + and self.valid_auth_enabled(req) + and self.serverless_ok(req) + ): + return True + return False + + def ensure_run_on(self, scenario_def, method): + """Test modifier that enforces a 'runOn' on a test case.""" + + def predicate(): + return self.should_run_on(scenario_def) + + return client_context._require(predicate, "runOn not satisfied", method) + + def tests(self, scenario_def): + """Allow CMAP spec test to override the location of test.""" + return scenario_def["tests"] + + def _create_tests(self): + for dirpath, _, filenames in os.walk(self.test_path): + dirname = os.path.split(dirpath)[-1] + + for filename in filenames: + with open(os.path.join(dirpath, filename)) as scenario_stream: # noqa: ASYNC101, RUF100 + # Use tz_aware=False to match how CodecOptions decodes + # dates. + opts = json_util.JSONOptions(tz_aware=False) + scenario_def = ScenarioDict( + json_util.loads(scenario_stream.read(), json_options=opts) + ) + + test_type = os.path.splitext(filename)[0] + + # Construct test from scenario. 
+ for test_def in self.tests(scenario_def): + test_name = "test_{}_{}_{}".format( + dirname, + test_type.replace("-", "_").replace(".", "_"), + str(test_def["description"].replace(" ", "_").replace(".", "_")), + ) + + new_test = self._create_test(scenario_def, test_def, test_name) + new_test = self._ensure_min_max_server_version(scenario_def, new_test) + new_test = self.ensure_run_on(scenario_def, new_test) + + new_test.__name__ = test_name + setattr(self._test_class, new_test.__name__, new_test) + + def create_tests(self): + if _IS_SYNC: + self._create_tests() + else: + asyncio.run(self._create_tests()) + + class SpecRunner(IntegrationTest): mongos_clients: List knobs: client_knobs @@ -312,7 +473,10 @@ def run_operation(self, sessions, collection, operation): args.update(arguments) arguments = args - result = cmd(**dict(arguments)) + if not _IS_SYNC and iscoroutinefunction(cmd): + result = cmd(**dict(arguments)) + else: + result = cmd(**dict(arguments)) # Cleanup open change stream cursors. if name == "watch": self.addCleanup(result.close) @@ -583,7 +747,7 @@ def run_scenario(self, scenario_def, test): read_preference=ReadPreference.PRIMARY, read_concern=ReadConcern("local"), ) - actual_data = (outcome_coll.find(sort=[("_id", 1)])).to_list() + actual_data = outcome_coll.find(sort=[("_id", 1)]).to_list() # The expected data needs to be the left hand side here otherwise # CompareType(Binary) doesn't work. diff --git a/tools/synchro.py b/tools/synchro.py index 0ec8985a05..f704919a17 100644 --- a/tools/synchro.py +++ b/tools/synchro.py @@ -105,6 +105,8 @@ "PyMongo|c|async": "PyMongo|c", "AsyncTestGridFile": "TestGridFile", "AsyncTestGridFileNoConnect": "TestGridFileNoConnect", + "AsyncTestSpec": "TestSpec", + "AsyncSpecTestCreator": "SpecTestCreator", "async_set_fail_point": "set_fail_point", "async_ensure_all_connected": "ensure_all_connected", "async_repl_set_step_down": "repl_set_step_down", From 6973d2d2743b7679080b8be70391b767740cf674 Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Fri, 11 Oct 2024 11:02:06 -0400 Subject: [PATCH 022/182] PYTHON-4528 - Convert unified test runner to async (#1913) --- test/asynchronous/unified_format.py | 1573 +++++++++++++++++++++++++++ test/unified_format.py | 711 +----------- test/unified_format_shared.py | 679 ++++++++++++ tools/synchro.py | 1 + 4 files changed, 2301 insertions(+), 663 deletions(-) create mode 100644 test/asynchronous/unified_format.py create mode 100644 test/unified_format_shared.py diff --git a/test/asynchronous/unified_format.py b/test/asynchronous/unified_format.py new file mode 100644 index 0000000000..4c37422951 --- /dev/null +++ b/test/asynchronous/unified_format.py @@ -0,0 +1,1573 @@ +# Copyright 2020-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Unified test format runner. 
+ +https://github.com/mongodb/specifications/blob/master/source/unified-test-format/unified-test-format.rst +""" +from __future__ import annotations + +import asyncio +import binascii +import copy +import functools +import os +import re +import sys +import time +import traceback +from asyncio import iscoroutinefunction +from collections import defaultdict +from test.asynchronous import ( + AsyncIntegrationTest, + async_client_context, + client_knobs, + unittest, +) +from test.unified_format_shared import ( + IS_INTERRUPTED, + KMS_TLS_OPTS, + PLACEHOLDER_MAP, + SKIP_CSOT_TESTS, + EventListenerUtil, + MatchEvaluatorUtil, + coerce_result, + parse_bulk_write_error_result, + parse_bulk_write_result, + parse_client_bulk_write_error_result, + parse_collection_or_database_options, + with_metaclass, +) +from test.utils import ( + async_get_pool, + camel_to_snake, + camel_to_snake_args, + parse_spec_options, + prepare_spec_arguments, + snake_to_camel, + wait_until, +) +from test.utils_spec_runner import SpecRunnerThread +from test.version import Version +from typing import Any, Dict, List, Mapping, Optional + +import pymongo +from bson import SON, json_util +from bson.codec_options import DEFAULT_CODEC_OPTIONS +from bson.objectid import ObjectId +from gridfs import AsyncGridFSBucket, GridOut +from pymongo import ASCENDING, AsyncMongoClient, CursorType, _csot +from pymongo.asynchronous.change_stream import AsyncChangeStream +from pymongo.asynchronous.client_session import AsyncClientSession, TransactionOptions, _TxnState +from pymongo.asynchronous.collection import AsyncCollection +from pymongo.asynchronous.command_cursor import AsyncCommandCursor +from pymongo.asynchronous.database import AsyncDatabase +from pymongo.asynchronous.encryption import AsyncClientEncryption +from pymongo.asynchronous.helpers import anext +from pymongo.encryption_options import _HAVE_PYMONGOCRYPT +from pymongo.errors import ( + BulkWriteError, + ClientBulkWriteException, + ConfigurationError, + ConnectionFailure, + EncryptionError, + InvalidOperation, + NotPrimaryError, + OperationFailure, + PyMongoError, +) +from pymongo.monitoring import ( + CommandStartedEvent, +) +from pymongo.operations import ( + SearchIndexModel, +) +from pymongo.read_concern import ReadConcern +from pymongo.read_preferences import ReadPreference +from pymongo.server_api import ServerApi +from pymongo.server_selectors import Selection, writable_server_selector +from pymongo.server_type import SERVER_TYPE +from pymongo.topology_description import TopologyDescription +from pymongo.typings import _Address +from pymongo.write_concern import WriteConcern + +_IS_SYNC = False + + +async def is_run_on_requirement_satisfied(requirement): + topology_satisfied = True + req_topologies = requirement.get("topologies") + if req_topologies: + topology_satisfied = await async_client_context.is_topology_type(req_topologies) + + server_version = Version(*async_client_context.version[:3]) + + min_version_satisfied = True + req_min_server_version = requirement.get("minServerVersion") + if req_min_server_version: + min_version_satisfied = Version.from_string(req_min_server_version) <= server_version + + max_version_satisfied = True + req_max_server_version = requirement.get("maxServerVersion") + if req_max_server_version: + max_version_satisfied = Version.from_string(req_max_server_version) >= server_version + + serverless = requirement.get("serverless") + if serverless == "require": + serverless_satisfied = async_client_context.serverless + elif serverless == "forbid": 
+ serverless_satisfied = not async_client_context.serverless + else: # unset or "allow" + serverless_satisfied = True + + params_satisfied = True + params = requirement.get("serverParameters") + if params: + for param, val in params.items(): + if param not in async_client_context.server_parameters: + params_satisfied = False + elif async_client_context.server_parameters[param] != val: + params_satisfied = False + + auth_satisfied = True + req_auth = requirement.get("auth") + if req_auth is not None: + if req_auth: + auth_satisfied = async_client_context.auth_enabled + if auth_satisfied and "authMechanism" in requirement: + auth_satisfied = async_client_context.check_auth_type(requirement["authMechanism"]) + else: + auth_satisfied = not async_client_context.auth_enabled + + csfle_satisfied = True + req_csfle = requirement.get("csfle") + if req_csfle is True: + min_version_satisfied = Version.from_string("4.2") <= server_version + csfle_satisfied = _HAVE_PYMONGOCRYPT and min_version_satisfied + + return ( + topology_satisfied + and min_version_satisfied + and max_version_satisfied + and serverless_satisfied + and params_satisfied + and auth_satisfied + and csfle_satisfied + ) + + +class NonLazyCursor: + """A find cursor proxy that creates the remote cursor when initialized.""" + + def __init__(self, find_cursor, client): + self.client = client + self.find_cursor = find_cursor + # Create the server side cursor. + self.first_result = None + + @classmethod + async def create(cls, find_cursor, client): + cursor = cls(find_cursor, client) + try: + cursor.first_result = await anext(cursor.find_cursor) + except StopAsyncIteration: + cursor.first_result = None + return cursor + + @property + def alive(self): + return self.first_result is not None or self.find_cursor.alive + + async def __anext__(self): + if self.first_result is not None: + first = self.first_result + self.first_result = None + return first + return await anext(self.find_cursor) + + # Added to support the iterateOnce operation. + try_next = __anext__ + + async def close(self): + await self.find_cursor.close() + self.client = None + + +class EntityMapUtil: + """Utility class that implements an entity map as per the unified + test format specification. 
+ """ + + def __init__(self, test_class): + self._entities: Dict[str, Any] = {} + self._listeners: Dict[str, EventListenerUtil] = {} + self._session_lsids: Dict[str, Mapping[str, Any]] = {} + self.test: UnifiedSpecTestMixinV1 = test_class + self._cluster_time: Mapping[str, Any] = {} + + def __contains__(self, item): + return item in self._entities + + def __len__(self): + return len(self._entities) + + def __getitem__(self, item): + try: + return self._entities[item] + except KeyError: + self.test.fail(f"Could not find entity named {item} in map") + + def __setitem__(self, key, value): + if not isinstance(key, str): + self.test.fail("Expected entity name of type str, got %s" % (type(key))) + + if key in self._entities: + self.test.fail(f"Entity named {key} already in map") + + self._entities[key] = value + + def _handle_placeholders(self, spec: dict, current: dict, path: str) -> Any: + if "$$placeholder" in current: + if path not in PLACEHOLDER_MAP: + raise ValueError(f"Could not find a placeholder value for {path}") + return PLACEHOLDER_MAP[path] + + for key in list(current): + value = current[key] + if isinstance(value, dict): + subpath = f"{path}/{key}" + current[key] = self._handle_placeholders(spec, value, subpath) + return current + + async def _create_entity(self, entity_spec, uri=None): + if len(entity_spec) != 1: + self.test.fail(f"Entity spec {entity_spec} did not contain exactly one top-level key") + + entity_type, spec = next(iter(entity_spec.items())) + spec = self._handle_placeholders(spec, spec, "") + if entity_type == "client": + kwargs: dict = {} + observe_events = spec.get("observeEvents", []) + + # The unified tests use topologyOpeningEvent, we use topologyOpenedEvent + for i in range(len(observe_events)): + if "topologyOpeningEvent" == observe_events[i]: + observe_events[i] = "topologyOpenedEvent" + ignore_commands = spec.get("ignoreCommandMonitoringEvents", []) + observe_sensitive_commands = spec.get("observeSensitiveCommands", False) + ignore_commands = [cmd.lower() for cmd in ignore_commands] + listener = EventListenerUtil( + observe_events, + ignore_commands, + observe_sensitive_commands, + spec.get("storeEventsAsEntities"), + self, + ) + self._listeners[spec["id"]] = listener + kwargs["event_listeners"] = [listener] + if spec.get("useMultipleMongoses"): + if async_client_context.load_balancer or async_client_context.serverless: + kwargs["h"] = async_client_context.MULTI_MONGOS_LB_URI + elif async_client_context.is_mongos: + kwargs["h"] = async_client_context.mongos_seeds() + kwargs.update(spec.get("uriOptions", {})) + server_api = spec.get("serverApi") + if "waitQueueSize" in kwargs: + raise unittest.SkipTest("PyMongo does not support waitQueueSize") + if "waitQueueMultiple" in kwargs: + raise unittest.SkipTest("PyMongo does not support waitQueueMultiple") + if server_api: + kwargs["server_api"] = ServerApi( + server_api["version"], + strict=server_api.get("strict"), + deprecation_errors=server_api.get("deprecationErrors"), + ) + if uri: + kwargs["h"] = uri + client = await self.test.async_rs_or_single_client(**kwargs) + self[spec["id"]] = client + self.test.addAsyncCleanup(client.close) + return + elif entity_type == "database": + client = self[spec["client"]] + if type(client).__name__ != "AsyncMongoClient": + self.test.fail( + "Expected entity {} to be of type AsyncMongoClient, got {}".format( + spec["client"], type(client) + ) + ) + options = parse_collection_or_database_options(spec.get("databaseOptions", {})) + self[spec["id"]] = 
client.get_database(spec["databaseName"], **options) + return + elif entity_type == "collection": + database = self[spec["database"]] + if not isinstance(database, AsyncDatabase): + self.test.fail( + "Expected entity {} to be of type AsyncDatabase, got {}".format( + spec["database"], type(database) + ) + ) + options = parse_collection_or_database_options(spec.get("collectionOptions", {})) + self[spec["id"]] = database.get_collection(spec["collectionName"], **options) + return + elif entity_type == "session": + client = self[spec["client"]] + if type(client).__name__ != "AsyncMongoClient": + self.test.fail( + "Expected entity {} to be of type AsyncMongoClient, got {}".format( + spec["client"], type(client) + ) + ) + opts = camel_to_snake_args(spec.get("sessionOptions", {})) + if "default_transaction_options" in opts: + txn_opts = parse_spec_options(opts["default_transaction_options"]) + txn_opts = TransactionOptions(**txn_opts) + opts = copy.deepcopy(opts) + opts["default_transaction_options"] = txn_opts + session = client.start_session(**dict(opts)) + self[spec["id"]] = session + self._session_lsids[spec["id"]] = copy.deepcopy(session.session_id) + self.test.addAsyncCleanup(session.end_session) + return + elif entity_type == "bucket": + db = self[spec["database"]] + kwargs = parse_spec_options(spec.get("bucketOptions", {}).copy()) + bucket = AsyncGridFSBucket(db, **kwargs) + + # PyMongo does not support AsyncGridFSBucket.drop(), emulate it. + @_csot.apply + async def drop(self: AsyncGridFSBucket, *args: Any, **kwargs: Any) -> None: + await self._files.drop(*args, **kwargs) + await self._chunks.drop(*args, **kwargs) + + if not hasattr(bucket, "drop"): + bucket.drop = drop.__get__(bucket) + self[spec["id"]] = bucket + return + elif entity_type == "clientEncryption": + opts = camel_to_snake_args(spec["clientEncryptionOpts"].copy()) + if isinstance(opts["key_vault_client"], str): + opts["key_vault_client"] = self[opts["key_vault_client"]] + # Set TLS options for providers like "kmip:name1". + kms_tls_options = {} + for provider in opts["kms_providers"]: + provider_type = provider.split(":")[0] + if provider_type in KMS_TLS_OPTS: + kms_tls_options[provider] = KMS_TLS_OPTS[provider_type] + self[spec["id"]] = AsyncClientEncryption( + opts["kms_providers"], + opts["key_vault_namespace"], + opts["key_vault_client"], + DEFAULT_CODEC_OPTIONS, + opts.get("kms_tls_options", kms_tls_options), + ) + return + elif entity_type == "thread": + name = spec["id"] + thread = SpecRunnerThread(name) + thread.start() + self[name] = thread + return + + self.test.fail(f"Unable to create entity of unknown type {entity_type}") + + async def create_entities_from_spec(self, entity_spec, uri=None): + for spec in entity_spec: + await self._create_entity(spec, uri=uri) + + def get_listener_for_client(self, client_name: str) -> EventListenerUtil: + client = self[client_name] + if type(client).__name__ != "AsyncMongoClient": + self.test.fail( + f"Expected entity {client_name} to be of type AsyncMongoClient, got {type(client)}" + ) + + listener = self._listeners.get(client_name) + if not listener: + self.test.fail(f"No listeners configured for client {client_name}") + + return listener + + def get_lsid_for_session(self, session_name): + session = self[session_name] + if not isinstance(session, AsyncClientSession): + self.test.fail( + f"Expected entity {session_name} to be of type AsyncClientSession, got {type(session)}" + ) + + try: + return session.session_id + except InvalidOperation: + # session has been closed. 
+ return self._session_lsids[session_name] + + async def advance_cluster_times(self) -> None: + """Manually synchronize entities when desired""" + if not self._cluster_time: + self._cluster_time = (await self.test.client.admin.command("ping")).get("$clusterTime") + for entity in self._entities.values(): + if isinstance(entity, AsyncClientSession) and self._cluster_time: + entity.advance_cluster_time(self._cluster_time) + + +class UnifiedSpecTestMixinV1(AsyncIntegrationTest): + """Mixin class to run test cases from test specification files. + + Assumes that tests conform to the `unified test format + `_. + + Specification of the test suite being currently run is available as + a class attribute ``TEST_SPEC``. + """ + + SCHEMA_VERSION = Version.from_string("1.21") + RUN_ON_LOAD_BALANCER = True + RUN_ON_SERVERLESS = True + TEST_SPEC: Any + mongos_clients: list[AsyncMongoClient] = [] + + @staticmethod + async def should_run_on(run_on_spec): + if not run_on_spec: + # Always run these tests. + return True + + for req in run_on_spec: + if await is_run_on_requirement_satisfied(req): + return True + return False + + async def insert_initial_data(self, initial_data): + for i, collection_data in enumerate(initial_data): + coll_name = collection_data["collectionName"] + db_name = collection_data["databaseName"] + opts = collection_data.get("createOptions", {}) + documents = collection_data["documents"] + + # Setup the collection with as few majority writes as possible. + db = self.client[db_name] + await db.drop_collection(coll_name) + # Only use majority wc only on the final write. + if i == len(initial_data) - 1: + wc = WriteConcern(w="majority") + else: + wc = WriteConcern(w=1) + if documents: + if opts: + await db.create_collection(coll_name, **opts) + await db.get_collection(coll_name, write_concern=wc).insert_many(documents) + else: + # Ensure collection exists + await db.create_collection(coll_name, write_concern=wc, **opts) + + @classmethod + async def _setup_class(cls): + # super call creates internal client cls.client + await super()._setup_class() + # process file-level runOnRequirements + run_on_spec = cls.TEST_SPEC.get("runOnRequirements", []) + if not await cls.should_run_on(run_on_spec): + raise unittest.SkipTest(f"{cls.__name__} runOnRequirements not satisfied") + + # add any special-casing for skipping tests here + if async_client_context.storage_engine == "mmapv1": + if "retryable-writes" in cls.TEST_SPEC["description"] or "retryable_writes" in str( + cls.TEST_PATH + ): + raise unittest.SkipTest("MMAPv1 does not support retryWrites=True") + + # Handle mongos_clients for transactions tests. + cls.mongos_clients = [] + if ( + async_client_context.supports_transactions() + and not async_client_context.load_balancer + and not async_client_context.serverless + ): + for address in async_client_context.mongoses: + cls.mongos_clients.append( + await cls.unmanaged_async_single_client("{}:{}".format(*address)) + ) + + # Speed up the tests by decreasing the heartbeat frequency. 
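+ # client_knobs also lowers the cursor-cleanup and event-queue intervals
+ # so SDAM-dependent assertions settle quickly.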
+ cls.knobs = client_knobs( + heartbeat_frequency=0.1, + min_heartbeat_interval=0.1, + kill_cursor_frequency=0.1, + events_queue_frequency=0.1, + ) + cls.knobs.enable() + + @classmethod + async def _tearDown_class(cls): + cls.knobs.disable() + for client in cls.mongos_clients: + await client.close() + await super()._tearDown_class() + + async def asyncSetUp(self): + await super().asyncSetUp() + # process schemaVersion + # note: we check major schema version during class generation + # note: we do this here because we cannot run assertions in setUpClass + version = Version.from_string(self.TEST_SPEC["schemaVersion"]) + self.assertLessEqual( + version, + self.SCHEMA_VERSION, + f"expected schema version {self.SCHEMA_VERSION} or lower, got {version}", + ) + + # initialize internals + self.match_evaluator = MatchEvaluatorUtil(self) + + def maybe_skip_test(self, spec): + # add any special-casing for skipping tests here + if async_client_context.storage_engine == "mmapv1": + if ( + "Dirty explicit session is discarded" in spec["description"] + or "Dirty implicit session is discarded" in spec["description"] + or "Cancel server check" in spec["description"] + ): + self.skipTest("MMAPv1 does not support retryWrites=True") + if ( + "AsyncDatabase-level aggregate with $out includes read preference for 5.0+ server" + in spec["description"] + ): + if async_client_context.version[0] == 8: + self.skipTest("waiting on PYTHON-4356") + if "Aggregate with $out includes read preference for 5.0+ server" in spec["description"]: + if async_client_context.version[0] == 8: + self.skipTest("waiting on PYTHON-4356") + if "Client side error in command starting transaction" in spec["description"]: + self.skipTest("Implement PYTHON-1894") + if "timeoutMS applied to entire download" in spec["description"]: + self.skipTest("PyMongo's open_download_stream does not cap the stream's lifetime") + + class_name = self.__class__.__name__.lower() + description = spec["description"].lower() + if "csot" in class_name: + if "gridfs" in class_name and sys.platform == "win32": + self.skipTest("PYTHON-3522 CSOT GridFS tests are flaky on Windows") + if async_client_context.storage_engine == "mmapv1": + self.skipTest( + "MMAPv1 does not support retryable writes which is required for CSOT tests" + ) + if "change" in description or "change" in class_name: + self.skipTest("CSOT not implemented for watch()") + if "cursors" in class_name: + self.skipTest("CSOT not implemented for cursors") + if "tailable" in class_name: + self.skipTest("CSOT not implemented for tailable cursors") + if "sessions" in class_name: + self.skipTest("CSOT not implemented for sessions") + if "withtransaction" in description: + self.skipTest("CSOT not implemented for with_transaction") + if "transaction" in class_name or "transaction" in description: + self.skipTest("CSOT not implemented for transactions") + + # Some tests need to be skipped based on the operations they try to run. 
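+ # Unsupported operations (count, listIndexNames, modifyCollection,
+ # timeoutMode) and fail points without test commands enabled are skipped.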
+ for op in spec["operations"]: + name = op["name"] + if name == "count": + self.skipTest("PyMongo does not support count()") + if name == "listIndexNames": + self.skipTest("PyMongo does not support list_index_names()") + if async_client_context.storage_engine == "mmapv1": + if name == "createChangeStream": + self.skipTest("MMAPv1 does not support change streams") + if name == "withTransaction" or name == "startTransaction": + self.skipTest("MMAPv1 does not support document-level locking") + if not async_client_context.test_commands_enabled: + if name == "failPoint" or name == "targetedFailPoint": + self.skipTest("Test commands must be enabled to use fail points") + if name == "modifyCollection": + self.skipTest("PyMongo does not support modifyCollection") + if "timeoutMode" in op.get("arguments", {}): + self.skipTest("PyMongo does not support timeoutMode") + + def process_error(self, exception, spec): + if isinstance(exception, unittest.SkipTest): + raise + is_error = spec.get("isError") + is_client_error = spec.get("isClientError") + is_timeout_error = spec.get("isTimeoutError") + error_contains = spec.get("errorContains") + error_code = spec.get("errorCode") + error_code_name = spec.get("errorCodeName") + error_labels_contain = spec.get("errorLabelsContain") + error_labels_omit = spec.get("errorLabelsOmit") + expect_result = spec.get("expectResult") + error_response = spec.get("errorResponse") + if error_response: + if isinstance(exception, ClientBulkWriteException): + self.match_evaluator.match_result(error_response, exception.error.details) + else: + self.match_evaluator.match_result(error_response, exception.details) + + if is_error: + # already satisfied because exception was raised + pass + + if is_client_error: + if isinstance(exception, ClientBulkWriteException): + error = exception.error + else: + error = exception + # Connection errors are considered client errors. + if isinstance(error, ConnectionFailure): + self.assertNotIsInstance(error, NotPrimaryError) + elif isinstance(error, (InvalidOperation, ConfigurationError, EncryptionError)): + pass + else: + self.assertNotIsInstance(error, PyMongoError) + + if is_timeout_error: + self.assertIsInstance(exception, PyMongoError) + if not exception.timeout: + # Re-raise the exception for better diagnostics. 
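+ # PyMongoError.timeout is True only when the failure was caused by a
+ # timeout, which is what isTimeoutError asserts.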
+ raise exception + + if error_contains: + if isinstance(exception, BulkWriteError): + errmsg = str(exception.details).lower() + elif isinstance(exception, ClientBulkWriteException): + errmsg = str(exception.details).lower() + else: + errmsg = str(exception).lower() + self.assertIn(error_contains.lower(), errmsg) + + if error_code: + if isinstance(exception, ClientBulkWriteException): + self.assertEqual(error_code, exception.error.details.get("code")) + else: + self.assertEqual(error_code, exception.details.get("code")) + + if error_code_name: + if isinstance(exception, ClientBulkWriteException): + self.assertEqual(error_code, exception.error.details.get("codeName")) + else: + self.assertEqual(error_code_name, exception.details.get("codeName")) + + if error_labels_contain: + if isinstance(exception, ClientBulkWriteException): + error = exception.error + else: + error = exception + labels = [ + err_label for err_label in error_labels_contain if error.has_error_label(err_label) + ] + self.assertEqual(labels, error_labels_contain) + + if error_labels_omit: + for err_label in error_labels_omit: + if exception.has_error_label(err_label): + self.fail(f"Exception '{exception}' unexpectedly had label '{err_label}'") + + if expect_result: + if isinstance(exception, BulkWriteError): + result = parse_bulk_write_error_result(exception) + self.match_evaluator.match_result(expect_result, result) + elif isinstance(exception, ClientBulkWriteException): + result = parse_client_bulk_write_error_result(exception) + self.match_evaluator.match_result(expect_result, result) + else: + self.fail( + f"expectResult can only be specified with {BulkWriteError} or {ClientBulkWriteException} exceptions" + ) + + return exception + + def __raise_if_unsupported(self, opname, target, *target_types): + if not isinstance(target, target_types): + self.fail(f"Operation {opname} not supported for entity of type {type(target)}") + + async def __entityOperation_createChangeStream(self, target, *args, **kwargs): + if async_client_context.storage_engine == "mmapv1": + self.skipTest("MMAPv1 does not support change streams") + self.__raise_if_unsupported( + "createChangeStream", target, AsyncMongoClient, AsyncDatabase, AsyncCollection + ) + stream = await target.watch(*args, **kwargs) + self.addAsyncCleanup(stream.close) + return stream + + async def _clientOperation_createChangeStream(self, target, *args, **kwargs): + return await self.__entityOperation_createChangeStream(target, *args, **kwargs) + + async def _databaseOperation_createChangeStream(self, target, *args, **kwargs): + return await self.__entityOperation_createChangeStream(target, *args, **kwargs) + + async def _collectionOperation_createChangeStream(self, target, *args, **kwargs): + return await self.__entityOperation_createChangeStream(target, *args, **kwargs) + + async def _databaseOperation_runCommand(self, target, **kwargs): + self.__raise_if_unsupported("runCommand", target, AsyncDatabase) + # Ensure the first key is the command name. + ordered_command = SON([(kwargs.pop("command_name"), 1)]) + ordered_command.update(kwargs["command"]) + kwargs["command"] = ordered_command + return await target.command(**kwargs) + + async def _databaseOperation_runCursorCommand(self, target, **kwargs): + return list(await self._databaseOperation_createCommandCursor(target, **kwargs)) + + async def _databaseOperation_createCommandCursor(self, target, **kwargs): + self.__raise_if_unsupported("createCommandCursor", target, AsyncDatabase) + # Ensure the first key is the command name. 
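+ # MongoDB reads the command name from the first key, so a SON is used
+ # here instead of a plain dict.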
+ ordered_command = SON([(kwargs.pop("command_name"), 1)]) + ordered_command.update(kwargs["command"]) + kwargs["command"] = ordered_command + batch_size = 0 + + cursor_type = kwargs.pop("cursor_type", "nonTailable") + if cursor_type == CursorType.TAILABLE: + ordered_command["tailable"] = True + elif cursor_type == CursorType.TAILABLE_AWAIT: + ordered_command["tailable"] = True + ordered_command["awaitData"] = True + elif cursor_type != "nonTailable": + self.fail(f"unknown cursorType: {cursor_type}") + + if "maxTimeMS" in kwargs: + kwargs["max_await_time_ms"] = kwargs.pop("maxTimeMS") + + if "batch_size" in kwargs: + batch_size = kwargs.pop("batch_size") + + cursor = await target.cursor_command(**kwargs) + + if batch_size > 0: + cursor.batch_size(batch_size) + + return cursor + + async def kill_all_sessions(self): + if getattr(self, "client", None) is None: + return + clients = self.mongos_clients if self.mongos_clients else [self.client] + for client in clients: + try: + await client.admin.command("killAllSessions", []) + except OperationFailure: + # "operation was interrupted" by killing the command's + # own session. + pass + + async def _databaseOperation_listCollections(self, target, *args, **kwargs): + if "batch_size" in kwargs: + kwargs["cursor"] = {"batchSize": kwargs.pop("batch_size")} + cursor = await target.list_collections(*args, **kwargs) + return list(cursor) + + async def _databaseOperation_createCollection(self, target, *args, **kwargs): + # PYTHON-1936 Ignore the listCollections event from create_collection. + kwargs["check_exists"] = False + ret = await target.create_collection(*args, **kwargs) + return ret + + async def __entityOperation_aggregate(self, target, *args, **kwargs): + self.__raise_if_unsupported("aggregate", target, AsyncDatabase, AsyncCollection) + return await (await target.aggregate(*args, **kwargs)).to_list() + + async def _databaseOperation_aggregate(self, target, *args, **kwargs): + return await self.__entityOperation_aggregate(target, *args, **kwargs) + + async def _collectionOperation_aggregate(self, target, *args, **kwargs): + return await self.__entityOperation_aggregate(target, *args, **kwargs) + + async def _collectionOperation_find(self, target, *args, **kwargs): + self.__raise_if_unsupported("find", target, AsyncCollection) + find_cursor = target.find(*args, **kwargs) + return await find_cursor.to_list() + + async def _collectionOperation_createFindCursor(self, target, *args, **kwargs): + self.__raise_if_unsupported("find", target, AsyncCollection) + if "filter" not in kwargs: + self.fail('createFindCursor requires a "filter" argument') + cursor = await NonLazyCursor.create(target.find(*args, **kwargs), target.database.client) + self.addAsyncCleanup(cursor.close) + return cursor + + def _collectionOperation_count(self, target, *args, **kwargs): + self.skipTest("PyMongo does not support collection.count()") + + async def _collectionOperation_listIndexes(self, target, *args, **kwargs): + if "batch_size" in kwargs: + self.skipTest("PyMongo does not support batch_size for list_indexes") + return await (await target.list_indexes(*args, **kwargs)).to_list() + + def _collectionOperation_listIndexNames(self, target, *args, **kwargs): + self.skipTest("PyMongo does not support list_index_names") + + async def _collectionOperation_createSearchIndexes(self, target, *args, **kwargs): + models = [SearchIndexModel(**i) for i in kwargs["models"]] + return await target.create_search_indexes(models) + + async def _collectionOperation_listSearchIndexes(self, 
target, *args, **kwargs): + name = kwargs.get("name") + agg_kwargs = kwargs.get("aggregation_options", dict()) + return await (await target.list_search_indexes(name, **agg_kwargs)).to_list() + + async def _sessionOperation_withTransaction(self, target, *args, **kwargs): + if async_client_context.storage_engine == "mmapv1": + self.skipTest("MMAPv1 does not support document-level locking") + self.__raise_if_unsupported("withTransaction", target, AsyncClientSession) + return await target.with_transaction(*args, **kwargs) + + async def _sessionOperation_startTransaction(self, target, *args, **kwargs): + if async_client_context.storage_engine == "mmapv1": + self.skipTest("MMAPv1 does not support document-level locking") + self.__raise_if_unsupported("startTransaction", target, AsyncClientSession) + return await target.start_transaction(*args, **kwargs) + + async def _changeStreamOperation_iterateUntilDocumentOrError(self, target, *args, **kwargs): + self.__raise_if_unsupported("iterateUntilDocumentOrError", target, AsyncChangeStream) + return await anext(target) + + async def _cursor_iterateUntilDocumentOrError(self, target, *args, **kwargs): + self.__raise_if_unsupported( + "iterateUntilDocumentOrError", target, NonLazyCursor, AsyncCommandCursor + ) + while target.alive: + try: + return await anext(target) + except StopAsyncIteration: + pass + return None + + async def _cursor_close(self, target, *args, **kwargs): + self.__raise_if_unsupported("close", target, NonLazyCursor, AsyncCommandCursor) + return await target.close() + + async def _clientEncryptionOperation_createDataKey(self, target, *args, **kwargs): + if "opts" in kwargs: + kwargs.update(camel_to_snake_args(kwargs.pop("opts"))) + + return await target.create_data_key(*args, **kwargs) + + async def _clientEncryptionOperation_getKeys(self, target, *args, **kwargs): + return await (await target.get_keys(*args, **kwargs)).to_list() + + async def _clientEncryptionOperation_deleteKey(self, target, *args, **kwargs): + result = await target.delete_key(*args, **kwargs) + response = result.raw_result + response["deletedCount"] = result.deleted_count + return response + + async def _clientEncryptionOperation_rewrapManyDataKey(self, target, *args, **kwargs): + if "opts" in kwargs: + kwargs.update(camel_to_snake_args(kwargs.pop("opts"))) + data = await target.rewrap_many_data_key(*args, **kwargs) + if data.bulk_write_result: + return {"bulkWriteResult": parse_bulk_write_result(data.bulk_write_result)} + return {} + + async def _clientEncryptionOperation_encrypt(self, target, *args, **kwargs): + if "opts" in kwargs: + kwargs.update(camel_to_snake_args(kwargs.pop("opts"))) + return await target.encrypt(*args, **kwargs) + + async def _bucketOperation_download( + self, target: AsyncGridFSBucket, *args: Any, **kwargs: Any + ) -> bytes: + async with await target.open_download_stream(*args, **kwargs) as gout: + return await gout.read() + + async def _bucketOperation_downloadByName( + self, target: AsyncGridFSBucket, *args: Any, **kwargs: Any + ) -> bytes: + async with await target.open_download_stream_by_name(*args, **kwargs) as gout: + return await gout.read() + + async def _bucketOperation_upload( + self, target: AsyncGridFSBucket, *args: Any, **kwargs: Any + ) -> ObjectId: + kwargs["source"] = binascii.unhexlify(kwargs.pop("source")["$$hexBytes"]) + if "content_type" in kwargs: + kwargs.setdefault("metadata", {})["contentType"] = kwargs.pop("content_type") + return await target.upload_from_stream(*args, **kwargs) + + async def 
_bucketOperation_uploadWithId( + self, target: AsyncGridFSBucket, *args: Any, **kwargs: Any + ) -> Any: + kwargs["source"] = binascii.unhexlify(kwargs.pop("source")["$$hexBytes"]) + if "content_type" in kwargs: + kwargs.setdefault("metadata", {})["contentType"] = kwargs.pop("content_type") + return await target.upload_from_stream_with_id(*args, **kwargs) + + async def _bucketOperation_find( + self, target: AsyncGridFSBucket, *args: Any, **kwargs: Any + ) -> List[GridOut]: + return await target.find(*args, **kwargs).to_list() + + async def run_entity_operation(self, spec): + target = self.entity_map[spec["object"]] + opname = spec["name"] + opargs = spec.get("arguments") + expect_error = spec.get("expectError") + save_as_entity = spec.get("saveResultAsEntity") + expect_result = spec.get("expectResult") + ignore = spec.get("ignoreResultAndError") + if ignore and (expect_error or save_as_entity or expect_result): + raise ValueError( + "ignoreResultAndError is incompatible with saveResultAsEntity" + ", expectError, and expectResult" + ) + if opargs: + arguments = parse_spec_options(copy.deepcopy(opargs)) + prepare_spec_arguments( + spec, + arguments, + camel_to_snake(opname), + self.entity_map, + self.run_operations_and_throw, + ) + else: + arguments = {} + + if isinstance(target, AsyncMongoClient): + method_name = f"_clientOperation_{opname}" + elif isinstance(target, AsyncDatabase): + method_name = f"_databaseOperation_{opname}" + elif isinstance(target, AsyncCollection): + method_name = f"_collectionOperation_{opname}" + # contentType is always stored in metadata in pymongo. + if target.name.endswith(".files") and opname == "find": + for doc in spec.get("expectResult", []): + if "contentType" in doc: + doc.setdefault("metadata", {})["contentType"] = doc.pop("contentType") + elif isinstance(target, AsyncChangeStream): + method_name = f"_changeStreamOperation_{opname}" + elif isinstance(target, (NonLazyCursor, AsyncCommandCursor)): + method_name = f"_cursor_{opname}" + elif isinstance(target, AsyncClientSession): + method_name = f"_sessionOperation_{opname}" + elif isinstance(target, AsyncGridFSBucket): + method_name = f"_bucketOperation_{opname}" + if "id" in arguments: + arguments["file_id"] = arguments.pop("id") + # MD5 is always disabled in pymongo. + arguments.pop("disable_md5", None) + elif isinstance(target, AsyncClientEncryption): + method_name = f"_clientEncryptionOperation_{opname}" + else: + method_name = "doesNotExist" + + try: + method = getattr(self, method_name) + except AttributeError: + target_opname = camel_to_snake(opname) + if target_opname == "iterate_once": + target_opname = "try_next" + if target_opname == "client_bulk_write": + target_opname = "bulk_write" + try: + cmd = getattr(target, target_opname) + except AttributeError: + self.fail(f"Unsupported operation {opname} on entity {target}") + else: + cmd = functools.partial(method, target) + + try: + # CSOT: Translate the spec test "timeout" arg into pymongo's context timeout API. + if "timeout" in arguments: + timeout = arguments.pop("timeout") + with pymongo.timeout(timeout): + result = await cmd(**dict(arguments)) + else: + result = await cmd(**dict(arguments)) + except Exception as exc: + # Ignore all operation errors but to avoid masking bugs don't + # ignore things like TypeError and ValueError. 
+ if ignore and isinstance(exc, (PyMongoError,)): + return exc + if expect_error: + if method_name == "_collectionOperation_bulkWrite": + self.skipTest("Skipping test pending PYTHON-4598") + return self.process_error(exc, expect_error) + raise + else: + if method_name == "_collectionOperation_bulkWrite": + self.skipTest("Skipping test pending PYTHON-4598") + if expect_error: + self.fail(f'Excepted error {expect_error} but "{opname}" succeeded: {result}') + + if expect_result: + actual = coerce_result(opname, result) + self.match_evaluator.match_result(expect_result, actual) + + if save_as_entity: + self.entity_map[save_as_entity] = result + return None + return None + + async def __set_fail_point(self, client, command_args): + if not async_client_context.test_commands_enabled: + self.skipTest("Test commands must be enabled") + + cmd_on = SON([("configureFailPoint", "failCommand")]) + cmd_on.update(command_args) + await client.admin.command(cmd_on) + self.addAsyncCleanup( + client.admin.command, "configureFailPoint", cmd_on["configureFailPoint"], mode="off" + ) + + async def _testOperation_failPoint(self, spec): + await self.__set_fail_point( + client=self.entity_map[spec["client"]], command_args=spec["failPoint"] + ) + + async def _testOperation_targetedFailPoint(self, spec): + session = self.entity_map[spec["session"]] + if not session._pinned_address: + self.fail( + "Cannot use targetedFailPoint operation with unpinned " "session {}".format( + spec["session"] + ) + ) + + client = await self.async_single_client("{}:{}".format(*session._pinned_address)) + self.addAsyncCleanup(client.close) + await self.__set_fail_point(client=client, command_args=spec["failPoint"]) + + async def _testOperation_createEntities(self, spec): + await self.entity_map.create_entities_from_spec(spec["entities"], uri=self._uri) + await self.entity_map.advance_cluster_times() + + def _testOperation_assertSessionTransactionState(self, spec): + session = self.entity_map[spec["session"]] + expected_state = getattr(_TxnState, spec["state"].upper()) + self.assertEqual(expected_state, session._transaction.state) + + def _testOperation_assertSessionPinned(self, spec): + session = self.entity_map[spec["session"]] + self.assertIsNotNone(session._transaction.pinned_address) + + def _testOperation_assertSessionUnpinned(self, spec): + session = self.entity_map[spec["session"]] + self.assertIsNone(session._pinned_address) + self.assertIsNone(session._transaction.pinned_address) + + def __get_last_two_command_lsids(self, listener): + cmd_started_events = [] + for event in reversed(listener.events): + if isinstance(event, CommandStartedEvent): + cmd_started_events.append(event) + if len(cmd_started_events) < 2: + self.fail( + "Needed 2 CommandStartedEvents to compare lsids, " + "got %s" % (len(cmd_started_events)) + ) + return tuple([e.command["lsid"] for e in cmd_started_events][:2]) + + def _testOperation_assertDifferentLsidOnLastTwoCommands(self, spec): + listener = self.entity_map.get_listener_for_client(spec["client"]) + self.assertNotEqual(*self.__get_last_two_command_lsids(listener)) + + def _testOperation_assertSameLsidOnLastTwoCommands(self, spec): + listener = self.entity_map.get_listener_for_client(spec["client"]) + self.assertEqual(*self.__get_last_two_command_lsids(listener)) + + def _testOperation_assertSessionDirty(self, spec): + session = self.entity_map[spec["session"]] + self.assertTrue(session._server_session.dirty) + + def _testOperation_assertSessionNotDirty(self, spec): + session = 
self.entity_map[spec["session"]] + return self.assertFalse(session._server_session.dirty) + + async def _testOperation_assertCollectionExists(self, spec): + database_name = spec["databaseName"] + collection_name = spec["collectionName"] + collection_name_list = list( + await self.client.get_database(database_name).list_collection_names() + ) + self.assertIn(collection_name, collection_name_list) + + async def _testOperation_assertCollectionNotExists(self, spec): + database_name = spec["databaseName"] + collection_name = spec["collectionName"] + collection_name_list = list( + await self.client.get_database(database_name).list_collection_names() + ) + self.assertNotIn(collection_name, collection_name_list) + + async def _testOperation_assertIndexExists(self, spec): + collection = self.client[spec["databaseName"]][spec["collectionName"]] + index_names = [idx["name"] async for idx in await collection.list_indexes()] + self.assertIn(spec["indexName"], index_names) + + async def _testOperation_assertIndexNotExists(self, spec): + collection = self.client[spec["databaseName"]][spec["collectionName"]] + async for index in await collection.list_indexes(): + self.assertNotEqual(spec["indexName"], index["name"]) + + async def _testOperation_assertNumberConnectionsCheckedOut(self, spec): + client = self.entity_map[spec["client"]] + pool = await async_get_pool(client) + self.assertEqual(spec["connections"], pool.active_sockets) + + def _event_count(self, client_name, event): + listener = self.entity_map.get_listener_for_client(client_name) + actual_events = listener.get_events("all") + count = 0 + for actual in actual_events: + try: + self.match_evaluator.match_event(event, actual) + except AssertionError: + continue + else: + count += 1 + return count + + def _testOperation_assertEventCount(self, spec): + """Run the assertEventCount test operation. + + Assert the given event was published exactly `count` times. + """ + client, event, count = spec["client"], spec["event"], spec["count"] + self.assertEqual(self._event_count(client, event), count, f"expected {count} not {event!r}") + + def _testOperation_waitForEvent(self, spec): + """Run the waitForEvent test operation. + + Wait for a number of events to be published, or fail. 
+ """ + client, event, count = spec["client"], spec["event"], spec["count"] + wait_until( + lambda: self._event_count(client, event) >= count, + f"find {count} {event} event(s)", + ) + + async def _testOperation_wait(self, spec): + """Run the "wait" test operation.""" + await asyncio.sleep(spec["ms"] / 1000.0) + + def _testOperation_recordTopologyDescription(self, spec): + """Run the recordTopologyDescription test operation.""" + self.entity_map[spec["id"]] = self.entity_map[spec["client"]].topology_description + + def _testOperation_assertTopologyType(self, spec): + """Run the assertTopologyType test operation.""" + description = self.entity_map[spec["topologyDescription"]] + self.assertIsInstance(description, TopologyDescription) + self.assertEqual(description.topology_type_name, spec["topologyType"]) + + def _testOperation_waitForPrimaryChange(self, spec: dict) -> None: + """Run the waitForPrimaryChange test operation.""" + client = self.entity_map[spec["client"]] + old_description: TopologyDescription = self.entity_map[spec["priorTopologyDescription"]] + timeout = spec["timeoutMS"] / 1000.0 + + def get_primary(td: TopologyDescription) -> Optional[_Address]: + servers = writable_server_selector(Selection.from_topology_description(td)) + if servers and servers[0].server_type == SERVER_TYPE.RSPrimary: + return servers[0].address + return None + + old_primary = get_primary(old_description) + + def primary_changed() -> bool: + primary = client.primary + if primary is None: + return False + return primary != old_primary + + wait_until(primary_changed, "change primary", timeout=timeout) + + def _testOperation_runOnThread(self, spec): + """Run the 'runOnThread' operation.""" + thread = self.entity_map[spec["thread"]] + thread.schedule(lambda: self.run_entity_operation(spec["operation"])) + + def _testOperation_waitForThread(self, spec): + """Run the 'waitForThread' operation.""" + thread = self.entity_map[spec["thread"]] + thread.stop() + thread.join(10) + if thread.exc: + raise thread.exc + self.assertFalse(thread.is_alive(), "Thread {} is still running".format(spec["thread"])) + + async def _testOperation_loop(self, spec): + failure_key = spec.get("storeFailuresAsEntity") + error_key = spec.get("storeErrorsAsEntity") + successes_key = spec.get("storeSuccessesAsEntity") + iteration_key = spec.get("storeIterationsAsEntity") + iteration_limiter_key = spec.get("numIterations") + for i in [failure_key, error_key]: + if i: + self.entity_map[i] = [] + for i in [successes_key, iteration_key]: + if i: + self.entity_map[i] = 0 + i = 0 + global IS_INTERRUPTED + while True: + if iteration_limiter_key and i >= iteration_limiter_key: + break + i += 1 + if IS_INTERRUPTED: + break + try: + if iteration_key: + self.entity_map._entities[iteration_key] += 1 + for op in spec["operations"]: + await self.run_entity_operation(op) + if successes_key: + self.entity_map._entities[successes_key] += 1 + except Exception as exc: + if isinstance(exc, AssertionError): + key = failure_key or error_key + else: + key = error_key or failure_key + if not key: + raise + self.entity_map[key].append( + {"error": str(exc), "time": time.time(), "type": type(exc).__name__} + ) + + async def run_special_operation(self, spec): + opname = spec["name"] + method_name = f"_testOperation_{opname}" + try: + method = getattr(self, method_name) + except AttributeError: + self.fail(f"Unsupported special test operation {opname}") + else: + if iscoroutinefunction(method): + await method(spec["arguments"]) + else: + method(spec["arguments"]) + + 
async def run_operations(self, spec): + for op in spec: + if op["object"] == "testRunner": + await self.run_special_operation(op) + else: + await self.run_entity_operation(op) + + async def run_operations_and_throw(self, spec): + for op in spec: + if op["object"] == "testRunner": + await self.run_special_operation(op) + else: + result = await self.run_entity_operation(op) + if isinstance(result, Exception): + raise result + + def check_events(self, spec): + for event_spec in spec: + client_name = event_spec["client"] + events = event_spec["events"] + event_type = event_spec.get("eventType", "command") + ignore_extra_events = event_spec.get("ignoreExtraEvents", False) + server_connection_id = event_spec.get("serverConnectionId") + has_server_connection_id = event_spec.get("hasServerConnectionId", False) + listener = self.entity_map.get_listener_for_client(client_name) + actual_events = listener.get_events(event_type) + if ignore_extra_events: + actual_events = actual_events[: len(events)] + + if len(events) == 0: + self.assertEqual(actual_events, []) + continue + + if len(actual_events) != len(events): + expected = "\n".join(str(e) for e in events) + actual = "\n".join(str(a) for a in actual_events) + self.assertEqual( + len(actual_events), + len(events), + f"expected events:\n{expected}\nactual events:\n{actual}", + ) + + for idx, expected_event in enumerate(events): + self.match_evaluator.match_event(expected_event, actual_events[idx]) + + if has_server_connection_id: + assert server_connection_id is not None + assert server_connection_id >= 0 + else: + assert server_connection_id is None + + def process_ignore_messages(self, ignore_logs, actual_logs): + final_logs = [] + for log in actual_logs: + ignored = False + for ignore_log in ignore_logs: + if log["data"]["message"] == ignore_log["data"][ + "message" + ] and self.match_evaluator.match_result(ignore_log, log, test=False): + ignored = True + break + if not ignored: + final_logs.append(log) + return final_logs + + async def check_log_messages(self, operations, spec): + def format_logs(log_list): + client_to_log = defaultdict(list) + for log in log_list: + if log.module == "ocsp_support": + continue + data = json_util.loads(log.getMessage()) + client = data.pop("clientId") if "clientId" in data else data.pop("topologyId") + client_to_log[client].append( + { + "level": log.levelname.lower(), + "component": log.name.replace("pymongo.", "", 1), + "data": data, + } + ) + return client_to_log + + with self.assertLogs("pymongo", level="DEBUG") as cm: + await self.run_operations(operations) + formatted_logs = format_logs(cm.records) + for client in spec: + components = set() + for message in client["messages"]: + components.add(message["component"]) + + clientid = self.entity_map[client["client"]]._topology_settings._topology_id + actual_logs = formatted_logs[clientid] + actual_logs = [log for log in actual_logs if log["component"] in components] + + ignore_logs = client.get("ignoreMessages", []) + if ignore_logs: + actual_logs = self.process_ignore_messages(ignore_logs, actual_logs) + + if client.get("ignoreExtraMessages", False): + actual_logs = actual_logs[: len(client["messages"])] + self.assertEqual( + len(client["messages"]), + len(actual_logs), + f"expected {client['messages']} but got {actual_logs}", + ) + for expected_msg, actual_msg in zip(client["messages"], actual_logs): + expected_data, actual_data = expected_msg.pop("data"), actual_msg.pop("data") + + if "failureIsRedacted" in expected_msg: + self.assertIn("failure", 
actual_data) + should_redact = expected_msg.pop("failureIsRedacted") + if should_redact: + actual_fields = set(json_util.loads(actual_data["failure"]).keys()) + self.assertTrue( + {"code", "codeName", "errorLabels"}.issuperset(actual_fields) + ) + + self.match_evaluator.match_result(expected_data, actual_data) + self.match_evaluator.match_result(expected_msg, actual_msg) + + async def verify_outcome(self, spec): + for collection_data in spec: + coll_name = collection_data["collectionName"] + db_name = collection_data["databaseName"] + expected_documents = collection_data["documents"] + + coll = self.client.get_database(db_name).get_collection( + coll_name, + read_preference=ReadPreference.PRIMARY, + read_concern=ReadConcern(level="local"), + ) + + if expected_documents: + sorted_expected_documents = sorted(expected_documents, key=lambda doc: doc["_id"]) + actual_documents = await coll.find({}, sort=[("_id", ASCENDING)]).to_list() + self.assertListEqual(sorted_expected_documents, actual_documents) + + async def run_scenario(self, spec, uri=None): + if "csot" in self.id().lower() and SKIP_CSOT_TESTS: + raise unittest.SkipTest("SKIP_CSOT_TESTS is set, skipping...") + + # Kill all sessions before and after each test to prevent an open + # transaction (from a test failure) from blocking collection/database + # operations during test set up and tear down. + await self.kill_all_sessions() + self.addAsyncCleanup(self.kill_all_sessions) + + if "csot" in self.id().lower(): + # Retry CSOT tests up to 2 times to deal with flakey tests. + attempts = 3 + for i in range(attempts): + try: + return await self._run_scenario(spec, uri) + except AssertionError: + if i < attempts - 1: + print( + f"Retrying after attempt {i+1} of {self.id()} failed with:\n" + f"{traceback.format_exc()}", + file=sys.stderr, + ) + await self.asyncSetUp() + continue + raise + return None + else: + await self._run_scenario(spec, uri) + return None + + async def _run_scenario(self, spec, uri=None): + # maybe skip test manually + self.maybe_skip_test(spec) + + # process test-level runOnRequirements + run_on_spec = spec.get("runOnRequirements", []) + if not await self.should_run_on(run_on_spec): + raise unittest.SkipTest("runOnRequirements not satisfied") + + # process skipReason + skip_reason = spec.get("skipReason", None) + if skip_reason is not None: + raise unittest.SkipTest(f"{skip_reason}") + + # process createEntities + self._uri = uri + self.entity_map = EntityMapUtil(self) + await self.entity_map.create_entities_from_spec( + self.TEST_SPEC.get("createEntities", []), uri=uri + ) + # process initialData + if "initialData" in self.TEST_SPEC: + await self.insert_initial_data(self.TEST_SPEC["initialData"]) + self._cluster_time = (await self.client.admin.command("ping")).get("$clusterTime") + await self.entity_map.advance_cluster_times() + + if "expectLogMessages" in spec: + expect_log_messages = spec["expectLogMessages"] + self.assertTrue(expect_log_messages, "expectEvents must be non-empty") + await self.check_log_messages(spec["operations"], expect_log_messages) + else: + # process operations + await self.run_operations(spec["operations"]) + + # process expectEvents + if "expectEvents" in spec: + expect_events = spec["expectEvents"] + self.assertTrue(expect_events, "expectEvents must be non-empty") + self.check_events(expect_events) + + # process outcome + await self.verify_outcome(spec.get("outcome", [])) + + +class UnifiedSpecTestMeta(type): + """Metaclass for generating test classes.""" + + TEST_SPEC: Any + EXPECTED_FAILURES: 
Any + + def __init__(cls, *args, **kwargs): + super().__init__(*args, **kwargs) + + def create_test(spec): + async def test_case(self): + await self.run_scenario(spec) + + return test_case + + for test_spec in cls.TEST_SPEC["tests"]: + description = test_spec["description"] + test_name = "test_{}".format( + description.strip(". ").replace(" ", "_").replace(".", "_") + ) + test_method = create_test(copy.deepcopy(test_spec)) + test_method.__name__ = str(test_name) + + for fail_pattern in cls.EXPECTED_FAILURES: + if re.search(fail_pattern, description): + test_method = unittest.expectedFailure(test_method) + break + + setattr(cls, test_name, test_method) + + +_ALL_MIXIN_CLASSES = [ + UnifiedSpecTestMixinV1, + # add mixin classes for new schema major versions here +] + + +_SCHEMA_VERSION_MAJOR_TO_MIXIN_CLASS = { + KLASS.SCHEMA_VERSION[0]: KLASS for KLASS in _ALL_MIXIN_CLASSES +} + + +def generate_test_classes( + test_path, + module=__name__, + class_name_prefix="", + expected_failures=[], # noqa: B006 + bypass_test_generation_errors=False, + **kwargs, +): + """Method for generating test classes. Returns a dictionary where keys are + the names of test classes and values are the test class objects. + """ + test_klasses = {} + + def test_base_class_factory(test_spec): + """Utility that creates the base class to use for test generation. + This is needed to ensure that cls.TEST_SPEC is appropriately set when + the metaclass __init__ is invoked. + """ + + class SpecTestBase(with_metaclass(UnifiedSpecTestMeta)): # type: ignore + TEST_SPEC = test_spec + EXPECTED_FAILURES = expected_failures + + return SpecTestBase + + for dirpath, _, filenames in os.walk(test_path): + dirname = os.path.split(dirpath)[-1] + + for filename in filenames: + fpath = os.path.join(dirpath, filename) + with open(fpath) as scenario_stream: + # Use tz_aware=False to match how CodecOptions decodes + # dates. 
+ opts = json_util.JSONOptions(tz_aware=False) + scenario_def = json_util.loads(scenario_stream.read(), json_options=opts) + + test_type = os.path.splitext(filename)[0] + snake_class_name = "Test{}_{}_{}".format( + class_name_prefix, + dirname.replace("-", "_"), + test_type.replace("-", "_").replace(".", "_"), + ) + class_name = snake_to_camel(snake_class_name) + + try: + schema_version = Version.from_string(scenario_def["schemaVersion"]) + mixin_class = _SCHEMA_VERSION_MAJOR_TO_MIXIN_CLASS.get(schema_version[0]) + if mixin_class is None: + raise ValueError( + f"test file '{fpath}' has unsupported schemaVersion '{schema_version}'" + ) + module_dict = {"__module__": module, "TEST_PATH": test_path} + module_dict.update(kwargs) + test_klasses[class_name] = type( + class_name, + ( + mixin_class, + test_base_class_factory(scenario_def), + ), + module_dict, + ) + except Exception: + if bypass_test_generation_errors: + continue + raise + + return test_klasses diff --git a/test/unified_format.py b/test/unified_format.py index 62211d3d25..6a19082b86 100644 --- a/test/unified_format.py +++ b/test/unified_format.py @@ -18,41 +18,41 @@ """ from __future__ import annotations +import asyncio import binascii -import collections import copy -import datetime import functools import os import re import sys import time import traceback -import types -from collections import abc, defaultdict +from asyncio import iscoroutinefunction +from collections import defaultdict from test import ( IntegrationTest, client_context, client_knobs, unittest, ) -from test.helpers import ( - AWS_CREDS, - AWS_CREDS_2, - AZURE_CREDS, - CA_PEM, - CLIENT_PEM, - GCP_CREDS, - KMIP_CREDS, - LOCAL_MASTER_KEY, - client_knobs, +from test.unified_format_shared import ( + IS_INTERRUPTED, + KMS_TLS_OPTS, + PLACEHOLDER_MAP, + SKIP_CSOT_TESTS, + EventListenerUtil, + MatchEvaluatorUtil, + coerce_result, + parse_bulk_write_error_result, + parse_bulk_write_result, + parse_client_bulk_write_error_result, + parse_collection_or_database_options, + with_metaclass, ) from test.utils import ( - CMAPListener, camel_to_snake, camel_to_snake_args, get_pool, - parse_collection_options, parse_spec_options, prepare_spec_arguments, snake_to_camel, @@ -60,14 +60,12 @@ ) from test.utils_spec_runner import SpecRunnerThread from test.version import Version -from typing import Any, Dict, List, Mapping, Optional, Union +from typing import Any, Dict, List, Mapping, Optional import pymongo -from bson import SON, Code, DBRef, Decimal128, Int64, MaxKey, MinKey, json_util -from bson.binary import Binary +from bson import SON, json_util from bson.codec_options import DEFAULT_CODEC_OPTIONS from bson.objectid import ObjectId -from bson.regex import RE_TYPE, Regex from gridfs import GridFSBucket, GridOut from pymongo import ASCENDING, CursorType, MongoClient, _csot from pymongo.encryption_options import _HAVE_PYMONGOCRYPT @@ -83,55 +81,14 @@ PyMongoError, ) from pymongo.monitoring import ( - _SENSITIVE_COMMANDS, - CommandFailedEvent, - CommandListener, CommandStartedEvent, - CommandSucceededEvent, - ConnectionCheckedInEvent, - ConnectionCheckedOutEvent, - ConnectionCheckOutFailedEvent, - ConnectionCheckOutStartedEvent, - ConnectionClosedEvent, - ConnectionCreatedEvent, - ConnectionReadyEvent, - PoolClearedEvent, - PoolClosedEvent, - PoolCreatedEvent, - PoolReadyEvent, - ServerClosedEvent, - ServerDescriptionChangedEvent, - ServerHeartbeatFailedEvent, - ServerHeartbeatListener, - ServerHeartbeatStartedEvent, - ServerHeartbeatSucceededEvent, - ServerListener, - 
ServerOpeningEvent, - TopologyClosedEvent, - TopologyDescriptionChangedEvent, - TopologyEvent, - TopologyListener, - TopologyOpenedEvent, - _CommandEvent, - _ConnectionEvent, - _PoolEvent, - _ServerEvent, - _ServerHeartbeatEvent, ) from pymongo.operations import ( - DeleteMany, - DeleteOne, - InsertOne, - ReplaceOne, SearchIndexModel, - UpdateMany, - UpdateOne, ) from pymongo.read_concern import ReadConcern from pymongo.read_preferences import ReadPreference -from pymongo.results import BulkWriteResult, ClientBulkWriteResult from pymongo.server_api import ServerApi -from pymongo.server_description import ServerDescription from pymongo.server_selectors import Selection, writable_server_selector from pymongo.server_type import SERVER_TYPE from pymongo.synchronous.change_stream import ChangeStream @@ -140,85 +97,12 @@ from pymongo.synchronous.command_cursor import CommandCursor from pymongo.synchronous.database import Database from pymongo.synchronous.encryption import ClientEncryption +from pymongo.synchronous.helpers import next from pymongo.topology_description import TopologyDescription from pymongo.typings import _Address from pymongo.write_concern import WriteConcern -SKIP_CSOT_TESTS = os.getenv("SKIP_CSOT_TESTS") - -JSON_OPTS = json_util.JSONOptions(tz_aware=False) - -IS_INTERRUPTED = False - -KMS_TLS_OPTS = { - "kmip": { - "tlsCAFile": CA_PEM, - "tlsCertificateKeyFile": CLIENT_PEM, - } -} - - -# Build up a placeholder maps. -PLACEHOLDER_MAP = {} -for provider_name, provider_data in [ - ("local", {"key": LOCAL_MASTER_KEY}), - ("local:name1", {"key": LOCAL_MASTER_KEY}), - ("aws", AWS_CREDS), - ("aws:name1", AWS_CREDS), - ("aws:name2", AWS_CREDS_2), - ("azure", AZURE_CREDS), - ("azure:name1", AZURE_CREDS), - ("gcp", GCP_CREDS), - ("gcp:name1", GCP_CREDS), - ("kmip", KMIP_CREDS), - ("kmip:name1", KMIP_CREDS), -]: - for key, value in provider_data.items(): - placeholder = f"/clientEncryptionOpts/kmsProviders/{provider_name}/{key}" - PLACEHOLDER_MAP[placeholder] = value - -OIDC_ENV = os.environ.get("OIDC_ENV", "test") -if OIDC_ENV == "test": - PLACEHOLDER_MAP["/uriOptions/authMechanismProperties"] = {"ENVIRONMENT": "test"} -elif OIDC_ENV == "azure": - PLACEHOLDER_MAP["/uriOptions/authMechanismProperties"] = { - "ENVIRONMENT": "azure", - "TOKEN_RESOURCE": os.environ["AZUREOIDC_RESOURCE"], - } -elif OIDC_ENV == "gcp": - PLACEHOLDER_MAP["/uriOptions/authMechanismProperties"] = { - "ENVIRONMENT": "gcp", - "TOKEN_RESOURCE": os.environ["GCPOIDC_AUDIENCE"], - } - - -def interrupt_loop(): - global IS_INTERRUPTED - IS_INTERRUPTED = True - - -def with_metaclass(meta, *bases): - """Create a base class with a metaclass. - - Vendored from six: https://github.com/benjaminp/six/blob/master/six.py - """ - - # This requires a bit of explanation: the basic idea is to make a dummy - # metaclass for one level of class instantiation that replaces itself with - # the actual metaclass. - class metaclass(type): - def __new__(cls, name, this_bases, d): - # __orig_bases__ is required by PEP 560. 
- resolved_bases = types.resolve_bases(bases) - if resolved_bases is not bases: - d["__orig_bases__"] = bases - return meta(name, resolved_bases, d) - - @classmethod - def __prepare__(cls, name, this_bases): - return meta.__prepare__(name, bases) - - return type.__new__(metaclass, "temporary_class", (), {}) +_IS_SYNC = True def is_run_on_requirement_satisfied(requirement): @@ -283,77 +167,6 @@ def is_run_on_requirement_satisfied(requirement): ) -def parse_collection_or_database_options(options): - return parse_collection_options(options) - - -def parse_bulk_write_result(result): - upserted_ids = {str(int_idx): result.upserted_ids[int_idx] for int_idx in result.upserted_ids} - return { - "deletedCount": result.deleted_count, - "insertedCount": result.inserted_count, - "matchedCount": result.matched_count, - "modifiedCount": result.modified_count, - "upsertedCount": result.upserted_count, - "upsertedIds": upserted_ids, - } - - -def parse_client_bulk_write_individual(op_type, result): - if op_type == "insert": - return {"insertedId": result.inserted_id} - if op_type == "update": - if result.upserted_id: - return { - "matchedCount": result.matched_count, - "modifiedCount": result.modified_count, - "upsertedId": result.upserted_id, - } - else: - return { - "matchedCount": result.matched_count, - "modifiedCount": result.modified_count, - } - if op_type == "delete": - return { - "deletedCount": result.deleted_count, - } - - -def parse_client_bulk_write_result(result): - insert_results, update_results, delete_results = {}, {}, {} - if result.has_verbose_results: - for idx, res in result.insert_results.items(): - insert_results[str(idx)] = parse_client_bulk_write_individual("insert", res) - for idx, res in result.update_results.items(): - update_results[str(idx)] = parse_client_bulk_write_individual("update", res) - for idx, res in result.delete_results.items(): - delete_results[str(idx)] = parse_client_bulk_write_individual("delete", res) - - return { - "deletedCount": result.deleted_count, - "insertedCount": result.inserted_count, - "matchedCount": result.matched_count, - "modifiedCount": result.modified_count, - "upsertedCount": result.upserted_count, - "insertResults": insert_results, - "updateResults": update_results, - "deleteResults": delete_results, - } - - -def parse_bulk_write_error_result(error): - write_result = BulkWriteResult(error.details, True) - return parse_bulk_write_result(write_result) - - -def parse_client_bulk_write_error_result(error): - write_result = error.partial_result - if not write_result: - return None - return parse_client_bulk_write_result(write_result) - - class NonLazyCursor: """A find cursor proxy that creates the remote cursor when initialized.""" @@ -361,7 +174,16 @@ def __init__(self, find_cursor, client): self.client = client self.find_cursor = find_cursor # Create the server side cursor. 
- self.first_result = next(find_cursor, None) + self.first_result = None + + @classmethod + def create(cls, find_cursor, client): + cursor = cls(find_cursor, client) + try: + cursor.first_result = next(cursor.find_cursor) + except StopIteration: + cursor.first_result = None + return cursor @property def alive(self): @@ -382,105 +204,6 @@ def close(self): self.client = None -class EventListenerUtil( - CMAPListener, CommandListener, ServerListener, ServerHeartbeatListener, TopologyListener -): - def __init__( - self, observe_events, ignore_commands, observe_sensitive_commands, store_events, entity_map - ): - self._event_types = {name.lower() for name in observe_events} - if observe_sensitive_commands: - self._observe_sensitive_commands = True - self._ignore_commands = set(ignore_commands) - else: - self._observe_sensitive_commands = False - self._ignore_commands = _SENSITIVE_COMMANDS | set(ignore_commands) - self._ignore_commands.add("configurefailpoint") - self._event_mapping = collections.defaultdict(list) - self.entity_map = entity_map - if store_events: - for i in store_events: - id = i["id"] - events = (i.lower() for i in i["events"]) - for i in events: - self._event_mapping[i].append(id) - self.entity_map[id] = [] - super().__init__() - - def get_events(self, event_type): - assert event_type in ("command", "cmap", "sdam", "all"), event_type - if event_type == "all": - return list(self.events) - if event_type == "command": - return [e for e in self.events if isinstance(e, _CommandEvent)] - if event_type == "cmap": - return [e for e in self.events if isinstance(e, (_ConnectionEvent, _PoolEvent))] - return [ - e - for e in self.events - if isinstance(e, (_ServerEvent, TopologyEvent, _ServerHeartbeatEvent)) - ] - - def add_event(self, event): - event_name = type(event).__name__.lower() - if event_name in self._event_types: - super().add_event(event) - for id in self._event_mapping[event_name]: - self.entity_map[id].append( - { - "name": type(event).__name__, - "observedAt": time.time(), - "description": repr(event), - } - ) - - def _command_event(self, event): - if event.command_name.lower() not in self._ignore_commands: - self.add_event(event) - - def started(self, event): - if isinstance(event, CommandStartedEvent): - if event.command == {}: - # Command is redacted. Observe only if flag is set. - if self._observe_sensitive_commands: - self._command_event(event) - else: - self._command_event(event) - else: - self.add_event(event) - - def succeeded(self, event): - if isinstance(event, CommandSucceededEvent): - if event.reply == {}: - # Command is redacted. Observe only if flag is set. - if self._observe_sensitive_commands: - self._command_event(event) - else: - self._command_event(event) - else: - self.add_event(event) - - def failed(self, event): - if isinstance(event, CommandFailedEvent): - self._command_event(event) - else: - self.add_event(event) - - def opened(self, event: Union[ServerOpeningEvent, TopologyOpenedEvent]) -> None: - self.add_event(event) - - def description_changed( - self, event: Union[ServerDescriptionChangedEvent, TopologyDescriptionChangedEvent] - ) -> None: - self.add_event(event) - - def topology_changed(self, event: TopologyDescriptionChangedEvent) -> None: - self.add_event(event) - - def closed(self, event: Union[ServerClosedEvent, TopologyClosedEvent]) -> None: - self.add_event(event) - - class EntityMapUtil: """Utility class that implements an entity map as per the unified test format specification. 
@@ -692,353 +415,12 @@ def get_lsid_for_session(self, session_name): def advance_cluster_times(self) -> None: """Manually synchronize entities when desired""" if not self._cluster_time: - self._cluster_time = self.test.client.admin.command("ping").get("$clusterTime") + self._cluster_time = (self.test.client.admin.command("ping")).get("$clusterTime") for entity in self._entities.values(): if isinstance(entity, ClientSession) and self._cluster_time: entity.advance_cluster_time(self._cluster_time) -binary_types = (Binary, bytes) -long_types = (Int64,) -unicode_type = str - - -BSON_TYPE_ALIAS_MAP = { - # https://mongodb.com/docs/manual/reference/operator/query/type/ - # https://pymongo.readthedocs.io/en/stable/api/bson/index.html - "double": (float,), - "string": (str,), - "object": (abc.Mapping,), - "array": (abc.MutableSequence,), - "binData": binary_types, - "undefined": (type(None),), - "objectId": (ObjectId,), - "bool": (bool,), - "date": (datetime.datetime,), - "null": (type(None),), - "regex": (Regex, RE_TYPE), - "dbPointer": (DBRef,), - "javascript": (unicode_type, Code), - "symbol": (unicode_type,), - "javascriptWithScope": (unicode_type, Code), - "int": (int,), - "long": (Int64,), - "decimal": (Decimal128,), - "maxKey": (MaxKey,), - "minKey": (MinKey,), -} - - -class MatchEvaluatorUtil: - """Utility class that implements methods for evaluating matches as per - the unified test format specification. - """ - - def __init__(self, test_class): - self.test = test_class - - def _operation_exists(self, spec, actual, key_to_compare): - if spec is True: - if key_to_compare is None: - assert actual is not None - else: - self.test.assertIn(key_to_compare, actual) - elif spec is False: - if key_to_compare is None: - assert actual is None - else: - self.test.assertNotIn(key_to_compare, actual) - else: - self.test.fail(f"Expected boolean value for $$exists operator, got {spec}") - - def __type_alias_to_type(self, alias): - if alias not in BSON_TYPE_ALIAS_MAP: - self.test.fail(f"Unrecognized BSON type alias {alias}") - return BSON_TYPE_ALIAS_MAP[alias] - - def _operation_type(self, spec, actual, key_to_compare): - if isinstance(spec, abc.MutableSequence): - permissible_types = tuple( - [t for alias in spec for t in self.__type_alias_to_type(alias)] - ) - else: - permissible_types = self.__type_alias_to_type(spec) - value = actual[key_to_compare] if key_to_compare else actual - self.test.assertIsInstance(value, permissible_types) - - def _operation_matchesEntity(self, spec, actual, key_to_compare): - expected_entity = self.test.entity_map[spec] - self.test.assertEqual(expected_entity, actual[key_to_compare]) - - def _operation_matchesHexBytes(self, spec, actual, key_to_compare): - expected = binascii.unhexlify(spec) - value = actual[key_to_compare] if key_to_compare else actual - self.test.assertEqual(value, expected) - - def _operation_unsetOrMatches(self, spec, actual, key_to_compare): - if key_to_compare is None and not actual: - # top-level document can be None when unset - return - - if key_to_compare not in actual: - # we add a dummy value for the compared key to pass map size check - actual[key_to_compare] = "dummyValue" - return - self.match_result(spec, actual[key_to_compare], in_recursive_call=True) - - def _operation_sessionLsid(self, spec, actual, key_to_compare): - expected_lsid = self.test.entity_map.get_lsid_for_session(spec) - self.test.assertEqual(expected_lsid, actual[key_to_compare]) - - def _operation_lte(self, spec, actual, key_to_compare): - if key_to_compare not in actual: - 
self.test.fail(f"Actual command is missing the {key_to_compare} field: {spec}") - self.test.assertLessEqual(actual[key_to_compare], spec) - - def _operation_matchAsDocument(self, spec, actual, key_to_compare): - self._match_document(spec, json_util.loads(actual[key_to_compare]), False) - - def _operation_matchAsRoot(self, spec, actual, key_to_compare): - self._match_document(spec, actual, True) - - def _evaluate_special_operation(self, opname, spec, actual, key_to_compare): - method_name = "_operation_{}".format(opname.strip("$")) - try: - method = getattr(self, method_name) - except AttributeError: - self.test.fail(f"Unsupported special matching operator {opname}") - else: - method(spec, actual, key_to_compare) - - def _evaluate_if_special_operation(self, expectation, actual, key_to_compare=None): - """Returns True if a special operation is evaluated, False - otherwise. If the ``expectation`` map contains a single key, - value pair we check it for a special operation. - If given, ``key_to_compare`` is assumed to be the key in - ``expectation`` whose corresponding value needs to be - evaluated for a possible special operation. ``key_to_compare`` - is ignored when ``expectation`` has only one key. - """ - if not isinstance(expectation, abc.Mapping): - return False - - is_special_op, opname, spec = False, False, False - - if key_to_compare is not None: - if key_to_compare.startswith("$$"): - is_special_op = True - opname = key_to_compare - spec = expectation[key_to_compare] - key_to_compare = None - else: - nested = expectation[key_to_compare] - if isinstance(nested, abc.Mapping) and len(nested) == 1: - opname, spec = next(iter(nested.items())) - if opname.startswith("$$"): - is_special_op = True - elif len(expectation) == 1: - opname, spec = next(iter(expectation.items())) - if opname.startswith("$$"): - is_special_op = True - key_to_compare = None - - if is_special_op: - self._evaluate_special_operation( - opname=opname, spec=spec, actual=actual, key_to_compare=key_to_compare - ) - return True - - return False - - def _match_document(self, expectation, actual, is_root, test=False): - if self._evaluate_if_special_operation(expectation, actual): - return - - self.test.assertIsInstance(actual, abc.Mapping) - for key, value in expectation.items(): - if self._evaluate_if_special_operation(expectation, actual, key): - continue - - self.test.assertIn(key, actual) - if not self.match_result(value, actual[key], in_recursive_call=True, test=test): - return False - - if not is_root: - expected_keys = set(expectation.keys()) - for key, value in expectation.items(): - if value == {"$$exists": False}: - expected_keys.remove(key) - if test: - self.test.assertEqual(expected_keys, set(actual.keys())) - else: - return set(expected_keys).issubset(set(actual.keys())) - return True - - def match_result(self, expectation, actual, in_recursive_call=False, test=True): - if isinstance(expectation, abc.Mapping): - return self._match_document( - expectation, actual, is_root=not in_recursive_call, test=test - ) - - if isinstance(expectation, abc.MutableSequence): - self.test.assertIsInstance(actual, abc.MutableSequence) - for e, a in zip(expectation, actual): - if isinstance(e, abc.Mapping): - self._match_document(e, a, is_root=not in_recursive_call, test=test) - else: - self.match_result(e, a, in_recursive_call=True, test=test) - return None - - # account for flexible numerics in element-wise comparison - if isinstance(expectation, int) or isinstance(expectation, float): - if test: - 
self.test.assertEqual(expectation, actual) - else: - return expectation == actual - return None - else: - if test: - self.test.assertIsInstance(actual, type(expectation)) - self.test.assertEqual(expectation, actual) - else: - return isinstance(actual, type(expectation)) and expectation == actual - return None - - def match_server_description(self, actual: ServerDescription, spec: dict) -> None: - for field, expected in spec.items(): - field = camel_to_snake(field) - if field == "type": - field = "server_type_name" - self.test.assertEqual(getattr(actual, field), expected) - - def match_topology_description(self, actual: TopologyDescription, spec: dict) -> None: - for field, expected in spec.items(): - field = camel_to_snake(field) - if field == "type": - field = "topology_type_name" - self.test.assertEqual(getattr(actual, field), expected) - - def match_event_fields(self, actual: Any, spec: dict) -> None: - for field, expected in spec.items(): - if field == "command" and isinstance(actual, CommandStartedEvent): - command = spec["command"] - if command: - self.match_result(command, actual.command) - continue - if field == "reply" and isinstance(actual, CommandSucceededEvent): - reply = spec["reply"] - if reply: - self.match_result(reply, actual.reply) - continue - if field == "hasServiceId": - if spec["hasServiceId"]: - self.test.assertIsNotNone(actual.service_id) - self.test.assertIsInstance(actual.service_id, ObjectId) - else: - self.test.assertIsNone(actual.service_id) - continue - if field == "hasServerConnectionId": - if spec["hasServerConnectionId"]: - self.test.assertIsNotNone(actual.server_connection_id) - self.test.assertIsInstance(actual.server_connection_id, int) - else: - self.test.assertIsNone(actual.server_connection_id) - continue - if field in ("previousDescription", "newDescription"): - if isinstance(actual, ServerDescriptionChangedEvent): - self.match_server_description( - getattr(actual, camel_to_snake(field)), spec[field] - ) - continue - if isinstance(actual, TopologyDescriptionChangedEvent): - self.match_topology_description( - getattr(actual, camel_to_snake(field)), spec[field] - ) - continue - - if field == "interruptInUseConnections": - field = "interrupt_connections" - else: - field = camel_to_snake(field) - self.test.assertEqual(getattr(actual, field), expected) - - def match_event(self, expectation, actual): - name, spec = next(iter(expectation.items())) - if name == "commandStartedEvent": - self.test.assertIsInstance(actual, CommandStartedEvent) - elif name == "commandSucceededEvent": - self.test.assertIsInstance(actual, CommandSucceededEvent) - elif name == "commandFailedEvent": - self.test.assertIsInstance(actual, CommandFailedEvent) - elif name == "poolCreatedEvent": - self.test.assertIsInstance(actual, PoolCreatedEvent) - elif name == "poolReadyEvent": - self.test.assertIsInstance(actual, PoolReadyEvent) - elif name == "poolClearedEvent": - self.test.assertIsInstance(actual, PoolClearedEvent) - self.test.assertIsInstance(actual.interrupt_connections, bool) - elif name == "poolClosedEvent": - self.test.assertIsInstance(actual, PoolClosedEvent) - elif name == "connectionCreatedEvent": - self.test.assertIsInstance(actual, ConnectionCreatedEvent) - elif name == "connectionReadyEvent": - self.test.assertIsInstance(actual, ConnectionReadyEvent) - elif name == "connectionClosedEvent": - self.test.assertIsInstance(actual, ConnectionClosedEvent) - elif name == "connectionCheckOutStartedEvent": - self.test.assertIsInstance(actual, ConnectionCheckOutStartedEvent) - elif 
name == "connectionCheckOutFailedEvent": - self.test.assertIsInstance(actual, ConnectionCheckOutFailedEvent) - elif name == "connectionCheckedOutEvent": - self.test.assertIsInstance(actual, ConnectionCheckedOutEvent) - elif name == "connectionCheckedInEvent": - self.test.assertIsInstance(actual, ConnectionCheckedInEvent) - elif name == "serverDescriptionChangedEvent": - self.test.assertIsInstance(actual, ServerDescriptionChangedEvent) - elif name == "serverHeartbeatStartedEvent": - self.test.assertIsInstance(actual, ServerHeartbeatStartedEvent) - elif name == "serverHeartbeatSucceededEvent": - self.test.assertIsInstance(actual, ServerHeartbeatSucceededEvent) - elif name == "serverHeartbeatFailedEvent": - self.test.assertIsInstance(actual, ServerHeartbeatFailedEvent) - elif name == "topologyDescriptionChangedEvent": - self.test.assertIsInstance(actual, TopologyDescriptionChangedEvent) - elif name == "topologyOpeningEvent": - self.test.assertIsInstance(actual, TopologyOpenedEvent) - elif name == "topologyClosedEvent": - self.test.assertIsInstance(actual, TopologyClosedEvent) - else: - raise Exception(f"Unsupported event type {name}") - - self.match_event_fields(actual, spec) - - -def coerce_result(opname, result): - """Convert a pymongo result into the spec's result format.""" - if hasattr(result, "acknowledged") and not result.acknowledged: - return {"acknowledged": False} - if opname == "bulkWrite": - return parse_bulk_write_result(result) - if opname == "clientBulkWrite": - return parse_client_bulk_write_result(result) - if opname == "insertOne": - return {"insertedId": result.inserted_id} - if opname == "insertMany": - return dict(enumerate(result.inserted_ids)) - if opname in ("deleteOne", "deleteMany"): - return {"deletedCount": result.deleted_count} - if opname in ("updateOne", "updateMany", "replaceOne"): - value = { - "matchedCount": result.matched_count, - "modifiedCount": result.modified_count, - "upsertedCount": 0 if result.upserted_id is None else 1, - } - if result.upserted_id is not None: - value["upsertedId"] = result.upserted_id - return value - return result - - class UnifiedSpecTestMixinV1(IntegrationTest): """Mixin class to run test cases from test specification files. 
@@ -1090,9 +472,9 @@ def insert_initial_data(self, initial_data): db.create_collection(coll_name, write_concern=wc, **opts) @classmethod - def setUpClass(cls): + def _setup_class(cls): # super call creates internal client cls.client - super().setUpClass() + super()._setup_class() # process file-level runOnRequirements run_on_spec = cls.TEST_SPEC.get("runOnRequirements", []) if not cls.should_run_on(run_on_spec): @@ -1125,11 +507,11 @@ def setUpClass(cls): cls.knobs.enable() @classmethod - def tearDownClass(cls): + def _tearDown_class(cls): cls.knobs.disable() for client in cls.mongos_clients: client.close() - super().tearDownClass() + super()._tearDown_class() def setUp(self): super().setUp() @@ -1391,7 +773,7 @@ def _databaseOperation_createCollection(self, target, *args, **kwargs): def __entityOperation_aggregate(self, target, *args, **kwargs): self.__raise_if_unsupported("aggregate", target, Database, Collection) - return list(target.aggregate(*args, **kwargs)) + return (target.aggregate(*args, **kwargs)).to_list() def _databaseOperation_aggregate(self, target, *args, **kwargs): return self.__entityOperation_aggregate(target, *args, **kwargs) @@ -1402,13 +784,13 @@ def _collectionOperation_aggregate(self, target, *args, **kwargs): def _collectionOperation_find(self, target, *args, **kwargs): self.__raise_if_unsupported("find", target, Collection) find_cursor = target.find(*args, **kwargs) - return list(find_cursor) + return find_cursor.to_list() def _collectionOperation_createFindCursor(self, target, *args, **kwargs): self.__raise_if_unsupported("find", target, Collection) if "filter" not in kwargs: self.fail('createFindCursor requires a "filter" argument') - cursor = NonLazyCursor(target.find(*args, **kwargs), target.database.client) + cursor = NonLazyCursor.create(target.find(*args, **kwargs), target.database.client) self.addCleanup(cursor.close) return cursor @@ -1418,7 +800,7 @@ def _collectionOperation_count(self, target, *args, **kwargs): def _collectionOperation_listIndexes(self, target, *args, **kwargs): if "batch_size" in kwargs: self.skipTest("PyMongo does not support batch_size for list_indexes") - return list(target.list_indexes(*args, **kwargs)) + return (target.list_indexes(*args, **kwargs)).to_list() def _collectionOperation_listIndexNames(self, target, *args, **kwargs): self.skipTest("PyMongo does not support list_index_names") @@ -1430,7 +812,7 @@ def _collectionOperation_createSearchIndexes(self, target, *args, **kwargs): def _collectionOperation_listSearchIndexes(self, target, *args, **kwargs): name = kwargs.get("name") agg_kwargs = kwargs.get("aggregation_options", dict()) - return list(target.list_search_indexes(name, **agg_kwargs)) + return (target.list_search_indexes(name, **agg_kwargs)).to_list() def _sessionOperation_withTransaction(self, target, *args, **kwargs): if client_context.storage_engine == "mmapv1": @@ -1470,7 +852,7 @@ def _clientEncryptionOperation_createDataKey(self, target, *args, **kwargs): return target.create_data_key(*args, **kwargs) def _clientEncryptionOperation_getKeys(self, target, *args, **kwargs): - return list(target.get_keys(*args, **kwargs)) + return (target.get_keys(*args, **kwargs)).to_list() def _clientEncryptionOperation_deleteKey(self, target, *args, **kwargs): result = target.delete_key(*args, **kwargs) @@ -1516,7 +898,7 @@ def _bucketOperation_uploadWithId(self, target: GridFSBucket, *args: Any, **kwar def _bucketOperation_find( self, target: GridFSBucket, *args: Any, **kwargs: Any ) -> List[GridOut]: - return 
list(target.find(*args, **kwargs)) + return target.find(*args, **kwargs).to_list() def run_entity_operation(self, spec): target = self.entity_map[spec["object"]] @@ -1849,7 +1231,10 @@ def run_special_operation(self, spec): except AttributeError: self.fail(f"Unsupported special test operation {opname}") else: - method(spec["arguments"]) + if iscoroutinefunction(method): + method(spec["arguments"]) + else: + method(spec["arguments"]) def run_operations(self, spec): for op in spec: @@ -1985,7 +1370,7 @@ def verify_outcome(self, spec): if expected_documents: sorted_expected_documents = sorted(expected_documents, key=lambda doc: doc["_id"]) - actual_documents = list(coll.find({}, sort=[("_id", ASCENDING)])) + actual_documents = coll.find({}, sort=[("_id", ASCENDING)]).to_list() self.assertListEqual(sorted_expected_documents, actual_documents) def run_scenario(self, spec, uri=None): @@ -2040,7 +1425,7 @@ def _run_scenario(self, spec, uri=None): # process initialData if "initialData" in self.TEST_SPEC: self.insert_initial_data(self.TEST_SPEC["initialData"]) - self._cluster_time = self.client.admin.command("ping").get("$clusterTime") + self._cluster_time = (self.client.admin.command("ping")).get("$clusterTime") self.entity_map.advance_cluster_times() if "expectLogMessages" in spec: diff --git a/test/unified_format_shared.py b/test/unified_format_shared.py new file mode 100644 index 0000000000..d11624476d --- /dev/null +++ b/test/unified_format_shared.py @@ -0,0 +1,679 @@ +# Copyright 2024-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Shared utility functions and constants for the unified test format runner. 
+ +https://github.com/mongodb/specifications/blob/master/source/unified-test-format/unified-test-format.rst +""" +from __future__ import annotations + +import binascii +import collections +import datetime +import os +import time +import types +from collections import abc +from test.helpers import ( + AWS_CREDS, + AWS_CREDS_2, + AZURE_CREDS, + CA_PEM, + CLIENT_PEM, + GCP_CREDS, + KMIP_CREDS, + LOCAL_MASTER_KEY, +) +from test.utils import CMAPListener, camel_to_snake, parse_collection_options +from typing import Any, Union + +from bson import ( + RE_TYPE, + Binary, + Code, + DBRef, + Decimal128, + Int64, + MaxKey, + MinKey, + ObjectId, + Regex, + json_util, +) +from pymongo.monitoring import ( + _SENSITIVE_COMMANDS, + CommandFailedEvent, + CommandListener, + CommandStartedEvent, + CommandSucceededEvent, + ConnectionCheckedInEvent, + ConnectionCheckedOutEvent, + ConnectionCheckOutFailedEvent, + ConnectionCheckOutStartedEvent, + ConnectionClosedEvent, + ConnectionCreatedEvent, + ConnectionReadyEvent, + PoolClearedEvent, + PoolClosedEvent, + PoolCreatedEvent, + PoolReadyEvent, + ServerClosedEvent, + ServerDescriptionChangedEvent, + ServerHeartbeatFailedEvent, + ServerHeartbeatListener, + ServerHeartbeatStartedEvent, + ServerHeartbeatSucceededEvent, + ServerListener, + ServerOpeningEvent, + TopologyClosedEvent, + TopologyDescriptionChangedEvent, + TopologyEvent, + TopologyListener, + TopologyOpenedEvent, + _CommandEvent, + _ConnectionEvent, + _PoolEvent, + _ServerEvent, + _ServerHeartbeatEvent, +) +from pymongo.results import BulkWriteResult +from pymongo.server_description import ServerDescription +from pymongo.topology_description import TopologyDescription + +SKIP_CSOT_TESTS = os.getenv("SKIP_CSOT_TESTS") + +JSON_OPTS = json_util.JSONOptions(tz_aware=False) + +IS_INTERRUPTED = False + +KMS_TLS_OPTS = { + "kmip": { + "tlsCAFile": CA_PEM, + "tlsCertificateKeyFile": CLIENT_PEM, + } +} + + +# Build up a placeholder maps. +PLACEHOLDER_MAP = {} +for provider_name, provider_data in [ + ("local", {"key": LOCAL_MASTER_KEY}), + ("local:name1", {"key": LOCAL_MASTER_KEY}), + ("aws", AWS_CREDS), + ("aws:name1", AWS_CREDS), + ("aws:name2", AWS_CREDS_2), + ("azure", AZURE_CREDS), + ("azure:name1", AZURE_CREDS), + ("gcp", GCP_CREDS), + ("gcp:name1", GCP_CREDS), + ("kmip", KMIP_CREDS), + ("kmip:name1", KMIP_CREDS), +]: + for key, value in provider_data.items(): + placeholder = f"/clientEncryptionOpts/kmsProviders/{provider_name}/{key}" + PLACEHOLDER_MAP[placeholder] = value + +OIDC_ENV = os.environ.get("OIDC_ENV", "test") +if OIDC_ENV == "test": + PLACEHOLDER_MAP["/uriOptions/authMechanismProperties"] = {"ENVIRONMENT": "test"} +elif OIDC_ENV == "azure": + PLACEHOLDER_MAP["/uriOptions/authMechanismProperties"] = { + "ENVIRONMENT": "azure", + "TOKEN_RESOURCE": os.environ["AZUREOIDC_RESOURCE"], + } +elif OIDC_ENV == "gcp": + PLACEHOLDER_MAP["/uriOptions/authMechanismProperties"] = { + "ENVIRONMENT": "gcp", + "TOKEN_RESOURCE": os.environ["GCPOIDC_AUDIENCE"], + } + + +def interrupt_loop(): + global IS_INTERRUPTED + IS_INTERRUPTED = True + + +def with_metaclass(meta, *bases): + """Create a base class with a metaclass. + + Vendored from six: https://github.com/benjaminp/six/blob/master/six.py + """ + + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. + class metaclass(type): + def __new__(cls, name, this_bases, d): + # __orig_bases__ is required by PEP 560. 
+ resolved_bases = types.resolve_bases(bases) + if resolved_bases is not bases: + d["__orig_bases__"] = bases + return meta(name, resolved_bases, d) + + @classmethod + def __prepare__(cls, name, this_bases): + return meta.__prepare__(name, bases) + + return type.__new__(metaclass, "temporary_class", (), {}) + + +def parse_collection_or_database_options(options): + return parse_collection_options(options) + + +def parse_bulk_write_result(result): + upserted_ids = {str(int_idx): result.upserted_ids[int_idx] for int_idx in result.upserted_ids} + return { + "deletedCount": result.deleted_count, + "insertedCount": result.inserted_count, + "matchedCount": result.matched_count, + "modifiedCount": result.modified_count, + "upsertedCount": result.upserted_count, + "upsertedIds": upserted_ids, + } + + +def parse_client_bulk_write_individual(op_type, result): + if op_type == "insert": + return {"insertedId": result.inserted_id} + if op_type == "update": + if result.upserted_id: + return { + "matchedCount": result.matched_count, + "modifiedCount": result.modified_count, + "upsertedId": result.upserted_id, + } + else: + return { + "matchedCount": result.matched_count, + "modifiedCount": result.modified_count, + } + if op_type == "delete": + return { + "deletedCount": result.deleted_count, + } + + +def parse_client_bulk_write_result(result): + insert_results, update_results, delete_results = {}, {}, {} + if result.has_verbose_results: + for idx, res in result.insert_results.items(): + insert_results[str(idx)] = parse_client_bulk_write_individual("insert", res) + for idx, res in result.update_results.items(): + update_results[str(idx)] = parse_client_bulk_write_individual("update", res) + for idx, res in result.delete_results.items(): + delete_results[str(idx)] = parse_client_bulk_write_individual("delete", res) + + return { + "deletedCount": result.deleted_count, + "insertedCount": result.inserted_count, + "matchedCount": result.matched_count, + "modifiedCount": result.modified_count, + "upsertedCount": result.upserted_count, + "insertResults": insert_results, + "updateResults": update_results, + "deleteResults": delete_results, + } + + +def parse_bulk_write_error_result(error): + write_result = BulkWriteResult(error.details, True) + return parse_bulk_write_result(write_result) + + +def parse_client_bulk_write_error_result(error): + write_result = error.partial_result + if not write_result: + return None + return parse_client_bulk_write_result(write_result) + + +class EventListenerUtil( + CMAPListener, CommandListener, ServerListener, ServerHeartbeatListener, TopologyListener +): + def __init__( + self, observe_events, ignore_commands, observe_sensitive_commands, store_events, entity_map + ): + self._event_types = {name.lower() for name in observe_events} + if observe_sensitive_commands: + self._observe_sensitive_commands = True + self._ignore_commands = set(ignore_commands) + else: + self._observe_sensitive_commands = False + self._ignore_commands = _SENSITIVE_COMMANDS | set(ignore_commands) + self._ignore_commands.add("configurefailpoint") + self._event_mapping = collections.defaultdict(list) + self.entity_map = entity_map + if store_events: + for i in store_events: + id = i["id"] + events = (i.lower() for i in i["events"]) + for i in events: + self._event_mapping[i].append(id) + self.entity_map[id] = [] + super().__init__() + + def get_events(self, event_type): + assert event_type in ("command", "cmap", "sdam", "all"), event_type + if event_type == "all": + return list(self.events) + if event_type == 
"command": + return [e for e in self.events if isinstance(e, _CommandEvent)] + if event_type == "cmap": + return [e for e in self.events if isinstance(e, (_ConnectionEvent, _PoolEvent))] + return [ + e + for e in self.events + if isinstance(e, (_ServerEvent, TopologyEvent, _ServerHeartbeatEvent)) + ] + + def add_event(self, event): + event_name = type(event).__name__.lower() + if event_name in self._event_types: + super().add_event(event) + for id in self._event_mapping[event_name]: + self.entity_map[id].append( + { + "name": type(event).__name__, + "observedAt": time.time(), + "description": repr(event), + } + ) + + def _command_event(self, event): + if event.command_name.lower() not in self._ignore_commands: + self.add_event(event) + + def started(self, event): + if isinstance(event, CommandStartedEvent): + if event.command == {}: + # Command is redacted. Observe only if flag is set. + if self._observe_sensitive_commands: + self._command_event(event) + else: + self._command_event(event) + else: + self.add_event(event) + + def succeeded(self, event): + if isinstance(event, CommandSucceededEvent): + if event.reply == {}: + # Command is redacted. Observe only if flag is set. + if self._observe_sensitive_commands: + self._command_event(event) + else: + self._command_event(event) + else: + self.add_event(event) + + def failed(self, event): + if isinstance(event, CommandFailedEvent): + self._command_event(event) + else: + self.add_event(event) + + def opened(self, event: Union[ServerOpeningEvent, TopologyOpenedEvent]) -> None: + self.add_event(event) + + def description_changed( + self, event: Union[ServerDescriptionChangedEvent, TopologyDescriptionChangedEvent] + ) -> None: + self.add_event(event) + + def topology_changed(self, event: TopologyDescriptionChangedEvent) -> None: + self.add_event(event) + + def closed(self, event: Union[ServerClosedEvent, TopologyClosedEvent]) -> None: + self.add_event(event) + + +binary_types = (Binary, bytes) +long_types = (Int64,) +unicode_type = str + + +BSON_TYPE_ALIAS_MAP = { + # https://mongodb.com/docs/manual/reference/operator/query/type/ + # https://pymongo.readthedocs.io/en/stable/api/bson/index.html + "double": (float,), + "string": (str,), + "object": (abc.Mapping,), + "array": (abc.MutableSequence,), + "binData": binary_types, + "undefined": (type(None),), + "objectId": (ObjectId,), + "bool": (bool,), + "date": (datetime.datetime,), + "null": (type(None),), + "regex": (Regex, RE_TYPE), + "dbPointer": (DBRef,), + "javascript": (unicode_type, Code), + "symbol": (unicode_type,), + "javascriptWithScope": (unicode_type, Code), + "int": (int,), + "long": (Int64,), + "decimal": (Decimal128,), + "maxKey": (MaxKey,), + "minKey": (MinKey,), +} + + +class MatchEvaluatorUtil: + """Utility class that implements methods for evaluating matches as per + the unified test format specification. 
+ """ + + def __init__(self, test_class): + self.test = test_class + + def _operation_exists(self, spec, actual, key_to_compare): + if spec is True: + if key_to_compare is None: + assert actual is not None + else: + self.test.assertIn(key_to_compare, actual) + elif spec is False: + if key_to_compare is None: + assert actual is None + else: + self.test.assertNotIn(key_to_compare, actual) + else: + self.test.fail(f"Expected boolean value for $$exists operator, got {spec}") + + def __type_alias_to_type(self, alias): + if alias not in BSON_TYPE_ALIAS_MAP: + self.test.fail(f"Unrecognized BSON type alias {alias}") + return BSON_TYPE_ALIAS_MAP[alias] + + def _operation_type(self, spec, actual, key_to_compare): + if isinstance(spec, abc.MutableSequence): + permissible_types = tuple( + [t for alias in spec for t in self.__type_alias_to_type(alias)] + ) + else: + permissible_types = self.__type_alias_to_type(spec) + value = actual[key_to_compare] if key_to_compare else actual + self.test.assertIsInstance(value, permissible_types) + + def _operation_matchesEntity(self, spec, actual, key_to_compare): + expected_entity = self.test.entity_map[spec] + self.test.assertEqual(expected_entity, actual[key_to_compare]) + + def _operation_matchesHexBytes(self, spec, actual, key_to_compare): + expected = binascii.unhexlify(spec) + value = actual[key_to_compare] if key_to_compare else actual + self.test.assertEqual(value, expected) + + def _operation_unsetOrMatches(self, spec, actual, key_to_compare): + if key_to_compare is None and not actual: + # top-level document can be None when unset + return + + if key_to_compare not in actual: + # we add a dummy value for the compared key to pass map size check + actual[key_to_compare] = "dummyValue" + return + self.match_result(spec, actual[key_to_compare], in_recursive_call=True) + + def _operation_sessionLsid(self, spec, actual, key_to_compare): + expected_lsid = self.test.entity_map.get_lsid_for_session(spec) + self.test.assertEqual(expected_lsid, actual[key_to_compare]) + + def _operation_lte(self, spec, actual, key_to_compare): + if key_to_compare not in actual: + self.test.fail(f"Actual command is missing the {key_to_compare} field: {spec}") + self.test.assertLessEqual(actual[key_to_compare], spec) + + def _operation_matchAsDocument(self, spec, actual, key_to_compare): + self._match_document(spec, json_util.loads(actual[key_to_compare]), False) + + def _operation_matchAsRoot(self, spec, actual, key_to_compare): + self._match_document(spec, actual, True) + + def _evaluate_special_operation(self, opname, spec, actual, key_to_compare): + method_name = "_operation_{}".format(opname.strip("$")) + try: + method = getattr(self, method_name) + except AttributeError: + self.test.fail(f"Unsupported special matching operator {opname}") + else: + method(spec, actual, key_to_compare) + + def _evaluate_if_special_operation(self, expectation, actual, key_to_compare=None): + """Returns True if a special operation is evaluated, False + otherwise. If the ``expectation`` map contains a single key, + value pair we check it for a special operation. + If given, ``key_to_compare`` is assumed to be the key in + ``expectation`` whose corresponding value needs to be + evaluated for a possible special operation. ``key_to_compare`` + is ignored when ``expectation`` has only one key. 
+ """ + if not isinstance(expectation, abc.Mapping): + return False + + is_special_op, opname, spec = False, False, False + + if key_to_compare is not None: + if key_to_compare.startswith("$$"): + is_special_op = True + opname = key_to_compare + spec = expectation[key_to_compare] + key_to_compare = None + else: + nested = expectation[key_to_compare] + if isinstance(nested, abc.Mapping) and len(nested) == 1: + opname, spec = next(iter(nested.items())) + if opname.startswith("$$"): + is_special_op = True + elif len(expectation) == 1: + opname, spec = next(iter(expectation.items())) + if opname.startswith("$$"): + is_special_op = True + key_to_compare = None + + if is_special_op: + self._evaluate_special_operation( + opname=opname, spec=spec, actual=actual, key_to_compare=key_to_compare + ) + return True + + return False + + def _match_document(self, expectation, actual, is_root, test=False): + if self._evaluate_if_special_operation(expectation, actual): + return + + self.test.assertIsInstance(actual, abc.Mapping) + for key, value in expectation.items(): + if self._evaluate_if_special_operation(expectation, actual, key): + continue + + self.test.assertIn(key, actual) + if not self.match_result(value, actual[key], in_recursive_call=True, test=test): + return False + + if not is_root: + expected_keys = set(expectation.keys()) + for key, value in expectation.items(): + if value == {"$$exists": False}: + expected_keys.remove(key) + if test: + self.test.assertEqual(expected_keys, set(actual.keys())) + else: + return set(expected_keys).issubset(set(actual.keys())) + return True + + def match_result(self, expectation, actual, in_recursive_call=False, test=True): + if isinstance(expectation, abc.Mapping): + return self._match_document( + expectation, actual, is_root=not in_recursive_call, test=test + ) + + if isinstance(expectation, abc.MutableSequence): + self.test.assertIsInstance(actual, abc.MutableSequence) + for e, a in zip(expectation, actual): + if isinstance(e, abc.Mapping): + self._match_document(e, a, is_root=not in_recursive_call, test=test) + else: + self.match_result(e, a, in_recursive_call=True, test=test) + return None + + # account for flexible numerics in element-wise comparison + if isinstance(expectation, int) or isinstance(expectation, float): + if test: + self.test.assertEqual(expectation, actual) + else: + return expectation == actual + return None + else: + if test: + self.test.assertIsInstance(actual, type(expectation)) + self.test.assertEqual(expectation, actual) + else: + return isinstance(actual, type(expectation)) and expectation == actual + return None + + def match_server_description(self, actual: ServerDescription, spec: dict) -> None: + for field, expected in spec.items(): + field = camel_to_snake(field) + if field == "type": + field = "server_type_name" + self.test.assertEqual(getattr(actual, field), expected) + + def match_topology_description(self, actual: TopologyDescription, spec: dict) -> None: + for field, expected in spec.items(): + field = camel_to_snake(field) + if field == "type": + field = "topology_type_name" + self.test.assertEqual(getattr(actual, field), expected) + + def match_event_fields(self, actual: Any, spec: dict) -> None: + for field, expected in spec.items(): + if field == "command" and isinstance(actual, CommandStartedEvent): + command = spec["command"] + if command: + self.match_result(command, actual.command) + continue + if field == "reply" and isinstance(actual, CommandSucceededEvent): + reply = spec["reply"] + if reply: + 
self.match_result(reply, actual.reply) + continue + if field == "hasServiceId": + if spec["hasServiceId"]: + self.test.assertIsNotNone(actual.service_id) + self.test.assertIsInstance(actual.service_id, ObjectId) + else: + self.test.assertIsNone(actual.service_id) + continue + if field == "hasServerConnectionId": + if spec["hasServerConnectionId"]: + self.test.assertIsNotNone(actual.server_connection_id) + self.test.assertIsInstance(actual.server_connection_id, int) + else: + self.test.assertIsNone(actual.server_connection_id) + continue + if field in ("previousDescription", "newDescription"): + if isinstance(actual, ServerDescriptionChangedEvent): + self.match_server_description( + getattr(actual, camel_to_snake(field)), spec[field] + ) + continue + if isinstance(actual, TopologyDescriptionChangedEvent): + self.match_topology_description( + getattr(actual, camel_to_snake(field)), spec[field] + ) + continue + + if field == "interruptInUseConnections": + field = "interrupt_connections" + else: + field = camel_to_snake(field) + self.test.assertEqual(getattr(actual, field), expected) + + def match_event(self, expectation, actual): + name, spec = next(iter(expectation.items())) + if name == "commandStartedEvent": + self.test.assertIsInstance(actual, CommandStartedEvent) + elif name == "commandSucceededEvent": + self.test.assertIsInstance(actual, CommandSucceededEvent) + elif name == "commandFailedEvent": + self.test.assertIsInstance(actual, CommandFailedEvent) + elif name == "poolCreatedEvent": + self.test.assertIsInstance(actual, PoolCreatedEvent) + elif name == "poolReadyEvent": + self.test.assertIsInstance(actual, PoolReadyEvent) + elif name == "poolClearedEvent": + self.test.assertIsInstance(actual, PoolClearedEvent) + self.test.assertIsInstance(actual.interrupt_connections, bool) + elif name == "poolClosedEvent": + self.test.assertIsInstance(actual, PoolClosedEvent) + elif name == "connectionCreatedEvent": + self.test.assertIsInstance(actual, ConnectionCreatedEvent) + elif name == "connectionReadyEvent": + self.test.assertIsInstance(actual, ConnectionReadyEvent) + elif name == "connectionClosedEvent": + self.test.assertIsInstance(actual, ConnectionClosedEvent) + elif name == "connectionCheckOutStartedEvent": + self.test.assertIsInstance(actual, ConnectionCheckOutStartedEvent) + elif name == "connectionCheckOutFailedEvent": + self.test.assertIsInstance(actual, ConnectionCheckOutFailedEvent) + elif name == "connectionCheckedOutEvent": + self.test.assertIsInstance(actual, ConnectionCheckedOutEvent) + elif name == "connectionCheckedInEvent": + self.test.assertIsInstance(actual, ConnectionCheckedInEvent) + elif name == "serverDescriptionChangedEvent": + self.test.assertIsInstance(actual, ServerDescriptionChangedEvent) + elif name == "serverHeartbeatStartedEvent": + self.test.assertIsInstance(actual, ServerHeartbeatStartedEvent) + elif name == "serverHeartbeatSucceededEvent": + self.test.assertIsInstance(actual, ServerHeartbeatSucceededEvent) + elif name == "serverHeartbeatFailedEvent": + self.test.assertIsInstance(actual, ServerHeartbeatFailedEvent) + elif name == "topologyDescriptionChangedEvent": + self.test.assertIsInstance(actual, TopologyDescriptionChangedEvent) + elif name == "topologyOpeningEvent": + self.test.assertIsInstance(actual, TopologyOpenedEvent) + elif name == "topologyClosedEvent": + self.test.assertIsInstance(actual, TopologyClosedEvent) + else: + raise Exception(f"Unsupported event type {name}") + + self.match_event_fields(actual, spec) + + +def coerce_result(opname, 
result): + """Convert a pymongo result into the spec's result format.""" + if hasattr(result, "acknowledged") and not result.acknowledged: + return {"acknowledged": False} + if opname == "bulkWrite": + return parse_bulk_write_result(result) + if opname == "clientBulkWrite": + return parse_client_bulk_write_result(result) + if opname == "insertOne": + return {"insertedId": result.inserted_id} + if opname == "insertMany": + return dict(enumerate(result.inserted_ids)) + if opname in ("deleteOne", "deleteMany"): + return {"deletedCount": result.deleted_count} + if opname in ("updateOne", "updateMany", "replaceOne"): + value = { + "matchedCount": result.matched_count, + "modifiedCount": result.modified_count, + "upsertedCount": 0 if result.upserted_id is None else 1, + } + if result.upserted_id is not None: + value["upsertedId"] = result.upserted_id + return value + return result diff --git a/tools/synchro.py b/tools/synchro.py index f704919a17..e0af5efa44 100644 --- a/tools/synchro.py +++ b/tools/synchro.py @@ -205,6 +205,7 @@ def async_only_test(f: str) -> bool: "test_retryable_writes.py", "test_session.py", "test_transactions.py", + "unified_format.py", ] sync_test_files = [ From 7e86d24c7bffe4da0a4d32580b5da0e6230b78d2 Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Fri, 11 Oct 2024 13:59:37 -0400 Subject: [PATCH 023/182] PYTHON-4849 - Convert test.test_connection_logging.py to async (#1918) --- test/asynchronous/test_connection_logging.py | 45 ++++++++++++++++++++ test/test_connection_logging.py | 8 +++- tools/synchro.py | 1 + 3 files changed, 53 insertions(+), 1 deletion(-) create mode 100644 test/asynchronous/test_connection_logging.py diff --git a/test/asynchronous/test_connection_logging.py b/test/asynchronous/test_connection_logging.py new file mode 100644 index 0000000000..6bc9835b70 --- /dev/null +++ b/test/asynchronous/test_connection_logging.py @@ -0,0 +1,45 @@ +# Copyright 2023-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Run the connection logging unified format spec tests.""" +from __future__ import annotations + +import os +import pathlib +import sys + +sys.path[0:0] = [""] + +from test import unittest +from test.unified_format import generate_test_classes + +_IS_SYNC = False + +# Location of JSON test specifications. 
+if _IS_SYNC: + _TEST_PATH = os.path.join(pathlib.Path(__file__).resolve().parent, "connection_logging") +else: + _TEST_PATH = os.path.join(pathlib.Path(__file__).resolve().parent.parent, "connection_logging") + + +globals().update( + generate_test_classes( + _TEST_PATH, + module=__name__, + ) +) + + +if __name__ == "__main__": + unittest.main() diff --git a/test/test_connection_logging.py b/test/test_connection_logging.py index 262ce821eb..253193cc43 100644 --- a/test/test_connection_logging.py +++ b/test/test_connection_logging.py @@ -16,6 +16,7 @@ from __future__ import annotations import os +import pathlib import sys sys.path[0:0] = [""] @@ -23,8 +24,13 @@ from test import unittest from test.unified_format import generate_test_classes +_IS_SYNC = True + # Location of JSON test specifications. -_TEST_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "connection_logging") +if _IS_SYNC: + _TEST_PATH = os.path.join(pathlib.Path(__file__).resolve().parent, "connection_logging") +else: + _TEST_PATH = os.path.join(pathlib.Path(__file__).resolve().parent.parent, "connection_logging") globals().update( diff --git a/tools/synchro.py b/tools/synchro.py index e0af5efa44..dbaf0a15e9 100644 --- a/tools/synchro.py +++ b/tools/synchro.py @@ -193,6 +193,7 @@ def async_only_test(f: str) -> bool: "test_collation.py", "test_collection.py", "test_common.py", + "test_connection_logging.py", "test_connections_survive_primary_stepdown_spec.py", "test_cursor.py", "test_database.py", From e0fde2338126ee3e8ca7771b3f88c4a2706638f2 Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Fri, 11 Oct 2024 13:59:44 -0400 Subject: [PATCH 024/182] PYTHON-4850 - Convert test.test_crud_unified to async (#1920) --- test/asynchronous/test_crud_unified.py | 39 ++++++++++++++++++++++++++ test/test_crud_unified.py | 10 +++++-- tools/synchro.py | 1 + 3 files changed, 48 insertions(+), 2 deletions(-) create mode 100644 test/asynchronous/test_crud_unified.py diff --git a/test/asynchronous/test_crud_unified.py b/test/asynchronous/test_crud_unified.py new file mode 100644 index 0000000000..3d8deb36e9 --- /dev/null +++ b/test/asynchronous/test_crud_unified.py @@ -0,0 +1,39 @@ +# Copyright 2021-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Test the CRUD unified spec tests.""" +from __future__ import annotations + +import os +import pathlib +import sys + +sys.path[0:0] = [""] + +from test import unittest +from test.unified_format import generate_test_classes + +_IS_SYNC = False + +# Location of JSON test specifications. +if _IS_SYNC: + _TEST_PATH = os.path.join(pathlib.Path(__file__).resolve().parent, "crud", "unified") +else: + _TEST_PATH = os.path.join(pathlib.Path(__file__).resolve().parent.parent, "crud", "unified") + +# Generate unified tests. 
+globals().update(generate_test_classes(_TEST_PATH, module=__name__, RUN_ON_SERVERLESS=True)) + +if __name__ == "__main__": + unittest.main() diff --git a/test/test_crud_unified.py b/test/test_crud_unified.py index 92a60a47fc..26f34cba88 100644 --- a/test/test_crud_unified.py +++ b/test/test_crud_unified.py @@ -16,6 +16,7 @@ from __future__ import annotations import os +import pathlib import sys sys.path[0:0] = [""] @@ -23,11 +24,16 @@ from test import unittest from test.unified_format import generate_test_classes +_IS_SYNC = True + # Location of JSON test specifications. -TEST_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "crud", "unified") +if _IS_SYNC: + _TEST_PATH = os.path.join(pathlib.Path(__file__).resolve().parent, "crud", "unified") +else: + _TEST_PATH = os.path.join(pathlib.Path(__file__).resolve().parent.parent, "crud", "unified") # Generate unified tests. -globals().update(generate_test_classes(TEST_PATH, module=__name__, RUN_ON_SERVERLESS=True)) +globals().update(generate_test_classes(_TEST_PATH, module=__name__, RUN_ON_SERVERLESS=True)) if __name__ == "__main__": unittest.main() diff --git a/tools/synchro.py b/tools/synchro.py index dbaf0a15e9..39ce7fbdd0 100644 --- a/tools/synchro.py +++ b/tools/synchro.py @@ -195,6 +195,7 @@ def async_only_test(f: str) -> bool: "test_common.py", "test_connection_logging.py", "test_connections_survive_primary_stepdown_spec.py", + "test_crud_unified.py", "test_cursor.py", "test_database.py", "test_encryption.py", From b2332b2aaeb26ecd7efa4992f037ca4dc56583db Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Fri, 11 Oct 2024 13:59:49 -0400 Subject: [PATCH 025/182] PYTHON-4846 - Convert test.test_command_logging.py to async (#1915) --- test/asynchronous/test_command_logging.py | 44 +++++++++++++++++++++++ test/test_command_logging.py | 9 ++++- tools/synchro.py | 1 + 3 files changed, 53 insertions(+), 1 deletion(-) create mode 100644 test/asynchronous/test_command_logging.py diff --git a/test/asynchronous/test_command_logging.py b/test/asynchronous/test_command_logging.py new file mode 100644 index 0000000000..f9b459c152 --- /dev/null +++ b/test/asynchronous/test_command_logging.py @@ -0,0 +1,44 @@ +# Copyright 2023-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Run the command monitoring unified format spec tests.""" +from __future__ import annotations + +import os +import pathlib +import sys + +sys.path[0:0] = [""] + +from test import unittest +from test.asynchronous.unified_format import generate_test_classes + +_IS_SYNC = False + +# Location of JSON test specifications. 
+if _IS_SYNC: + _TEST_PATH = os.path.join(pathlib.Path(__file__).resolve().parent, "command_logging") +else: + _TEST_PATH = os.path.join(pathlib.Path(__file__).resolve().parent.parent, "command_logging") + + +globals().update( + generate_test_classes( + _TEST_PATH, + module=__name__, + ) +) + +if __name__ == "__main__": + unittest.main() diff --git a/test/test_command_logging.py b/test/test_command_logging.py index 9b2d52e66b..cf865920ca 100644 --- a/test/test_command_logging.py +++ b/test/test_command_logging.py @@ -16,6 +16,7 @@ from __future__ import annotations import os +import pathlib import sys sys.path[0:0] = [""] @@ -23,8 +24,14 @@ from test import unittest from test.unified_format import generate_test_classes +_IS_SYNC = True + # Location of JSON test specifications. -_TEST_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "command_logging") +if _IS_SYNC: + _TEST_PATH = os.path.join(pathlib.Path(__file__).resolve().parent, "command_logging") +else: + _TEST_PATH = os.path.join(pathlib.Path(__file__).resolve().parent.parent, "command_logging") + globals().update( generate_test_classes( diff --git a/tools/synchro.py b/tools/synchro.py index 39ce7fbdd0..f40a64e4c2 100644 --- a/tools/synchro.py +++ b/tools/synchro.py @@ -192,6 +192,7 @@ def async_only_test(f: str) -> bool: "test_client_context.py", "test_collation.py", "test_collection.py", + "test_command_logging.py", "test_common.py", "test_connection_logging.py", "test_connections_survive_primary_stepdown_spec.py", From 4eeaa4b7be9e814fd207166904f42556c10ce63b Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Fri, 11 Oct 2024 14:56:43 -0400 Subject: [PATCH 026/182] PYTHON-4848 - Convert test.test_command_monitoring.py to async (#1917) --- test/asynchronous/test_command_monitoring.py | 45 ++++++++++++++++++++ test/test_command_monitoring.py | 8 +++- tools/synchro.py | 1 + 3 files changed, 53 insertions(+), 1 deletion(-) create mode 100644 test/asynchronous/test_command_monitoring.py diff --git a/test/asynchronous/test_command_monitoring.py b/test/asynchronous/test_command_monitoring.py new file mode 100644 index 0000000000..311fd1fdc1 --- /dev/null +++ b/test/asynchronous/test_command_monitoring.py @@ -0,0 +1,45 @@ +# Copyright 2015-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Run the command monitoring unified format spec tests.""" +from __future__ import annotations + +import os +import pathlib +import sys + +sys.path[0:0] = [""] + +from test import unittest +from test.asynchronous.unified_format import generate_test_classes + +_IS_SYNC = False + +# Location of JSON test specifications. 
+if _IS_SYNC: + _TEST_PATH = os.path.join(pathlib.Path(__file__).resolve().parent, "command_monitoring") +else: + _TEST_PATH = os.path.join(pathlib.Path(__file__).resolve().parent.parent, "command_monitoring") + + +globals().update( + generate_test_classes( + _TEST_PATH, + module=__name__, + ) +) + + +if __name__ == "__main__": + unittest.main() diff --git a/test/test_command_monitoring.py b/test/test_command_monitoring.py index d2f578824d..4f5ef06f28 100644 --- a/test/test_command_monitoring.py +++ b/test/test_command_monitoring.py @@ -16,6 +16,7 @@ from __future__ import annotations import os +import pathlib import sys sys.path[0:0] = [""] @@ -23,8 +24,13 @@ from test import unittest from test.unified_format import generate_test_classes +_IS_SYNC = True + # Location of JSON test specifications. -_TEST_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "command_monitoring") +if _IS_SYNC: + _TEST_PATH = os.path.join(pathlib.Path(__file__).resolve().parent, "command_monitoring") +else: + _TEST_PATH = os.path.join(pathlib.Path(__file__).resolve().parent.parent, "command_monitoring") globals().update( diff --git a/tools/synchro.py b/tools/synchro.py index f40a64e4c2..b6812e9be6 100644 --- a/tools/synchro.py +++ b/tools/synchro.py @@ -193,6 +193,7 @@ def async_only_test(f: str) -> bool: "test_collation.py", "test_collection.py", "test_command_logging.py", + "test_command_monitoring.py", "test_common.py", "test_connection_logging.py", "test_connections_survive_primary_stepdown_spec.py", From 33163ecc0d4fe7dc8f7bfc12ef93d89513203fe2 Mon Sep 17 00:00:00 2001 From: Iris <58442094+sleepyStick@users.noreply.github.com> Date: Fri, 11 Oct 2024 16:02:13 -0700 Subject: [PATCH 027/182] PYTHON-4804 Migrate test_comment.py to async (#1887) --- test/asynchronous/test_comment.py | 159 ++++++++++++++++++++++++++++++ test/test_comment.py | 60 ++++------- tools/synchro.py | 2 + 3 files changed, 179 insertions(+), 42 deletions(-) create mode 100644 test/asynchronous/test_comment.py diff --git a/test/asynchronous/test_comment.py b/test/asynchronous/test_comment.py new file mode 100644 index 0000000000..be3626a8b8 --- /dev/null +++ b/test/asynchronous/test_comment.py @@ -0,0 +1,159 @@ +# Copyright 2022-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Test the keyword argument 'comment' in various helpers.""" + +from __future__ import annotations + +import inspect +import sys + +sys.path[0:0] = [""] +from asyncio import iscoroutinefunction +from test.asynchronous import AsyncIntegrationTest, async_client_context, unittest +from test.utils import OvertCommandListener + +from bson.dbref import DBRef +from pymongo.asynchronous.command_cursor import AsyncCommandCursor +from pymongo.operations import IndexModel + +_IS_SYNC = False + + +class AsyncTestComment(AsyncIntegrationTest): + async def _test_ops( + self, + helpers, + already_supported, + listener, + ): + for h, args in helpers: + c = "testing comment with " + h.__name__ + with self.subTest("collection-" + h.__name__ + "-comment"): + for cc in [c, {"key": c}, ["any", 1]]: + listener.reset() + kwargs = {"comment": cc} + try: + maybe_cursor = await h(*args, **kwargs) + except Exception: + maybe_cursor = None + self.assertIn( + "comment", + inspect.signature(h).parameters, + msg="Could not find 'comment' in the " + "signature of function %s" % (h.__name__), + ) + self.assertEqual( + inspect.signature(h).parameters["comment"].annotation, "Optional[Any]" + ) + if isinstance(maybe_cursor, AsyncCommandCursor): + await maybe_cursor.close() + + cmd = listener.started_events[0] + self.assertEqual(cc, cmd.command.get("comment"), msg=cmd) + + if h.__name__ != "aggregate_raw_batches": + self.assertIn( + ":param comment:", + h.__doc__, + ) + if h not in already_supported: + self.assertIn( + "Added ``comment`` parameter", + h.__doc__, + ) + else: + self.assertNotIn( + "Added ``comment`` parameter", + h.__doc__, + ) + + listener.reset() + + @async_client_context.require_version_min(4, 7, -1) + @async_client_context.require_replica_set + async def test_database_helpers(self): + listener = OvertCommandListener() + db = (await self.async_rs_or_single_client(event_listeners=[listener])).db + helpers = [ + (db.watch, []), + (db.command, ["hello"]), + (db.list_collections, []), + (db.list_collection_names, []), + (db.drop_collection, ["hello"]), + (db.validate_collection, ["test"]), + (db.dereference, [DBRef("collection", 1)]), + ] + already_supported = [db.command, db.list_collections, db.list_collection_names] + await self._test_ops(helpers, already_supported, listener) + + @async_client_context.require_version_min(4, 7, -1) + @async_client_context.require_replica_set + async def test_client_helpers(self): + listener = OvertCommandListener() + cli = await self.async_rs_or_single_client(event_listeners=[listener]) + helpers = [ + (cli.watch, []), + (cli.list_databases, []), + (cli.list_database_names, []), + (cli.drop_database, ["test"]), + ] + already_supported = [ + cli.list_databases, + ] + await self._test_ops(helpers, already_supported, listener) + + @async_client_context.require_version_min(4, 7, -1) + async def test_collection_helpers(self): + listener = OvertCommandListener() + db = (await self.async_rs_or_single_client(event_listeners=[listener]))[self.db.name] + coll = db.get_collection("test") + + helpers = [ + (coll.list_indexes, []), + (coll.drop, []), + (coll.index_information, []), + (coll.options, []), + (coll.aggregate, [[{"$set": {"x": 1}}]]), + (coll.aggregate_raw_batches, [[{"$set": {"x": 1}}]]), + (coll.rename, ["temp_temp_temp"]), + (coll.distinct, ["_id"]), + (coll.find_one_and_delete, [{}]), + (coll.find_one_and_replace, [{}, {}]), + (coll.find_one_and_update, [{}, {"$set": {"a": 1}}]), + (coll.estimated_document_count, []), + (coll.count_documents, [{}]), + 
(coll.create_indexes, [[IndexModel("a")]]), + (coll.create_index, ["a"]), + (coll.drop_index, [[("a", 1)]]), + (coll.drop_indexes, []), + ] + already_supported = [ + coll.estimated_document_count, + coll.count_documents, + coll.create_indexes, + coll.drop_indexes, + coll.options, + coll.find_one_and_replace, + coll.drop_index, + coll.rename, + coll.distinct, + coll.find_one_and_delete, + coll.find_one_and_update, + ] + await self._test_ops(helpers, already_supported, listener) + + +if __name__ == "__main__": + unittest.main() diff --git a/test/test_comment.py b/test/test_comment.py index c0f037ea44..9f9bf98640 100644 --- a/test/test_comment.py +++ b/test/test_comment.py @@ -20,24 +20,15 @@ import sys sys.path[0:0] = [""] - +from asyncio import iscoroutinefunction from test import IntegrationTest, client_context, unittest -from test.utils import EventListener +from test.utils import OvertCommandListener from bson.dbref import DBRef from pymongo.operations import IndexModel from pymongo.synchronous.command_cursor import CommandCursor - -class Empty: - def __getattr__(self, item): - try: - self.__dict__[item] - except KeyError: - return self.empty - - def empty(self, *args, **kwargs): - return Empty() +_IS_SYNC = True class TestComment(IntegrationTest): @@ -46,8 +37,6 @@ def _test_ops( helpers, already_supported, listener, - db=Empty(), # noqa: B008 - coll=Empty(), # noqa: B008 ): for h, args in helpers: c = "testing comment with " + h.__name__ @@ -55,19 +44,10 @@ def _test_ops( for cc in [c, {"key": c}, ["any", 1]]: listener.reset() kwargs = {"comment": cc} - if h == coll.rename: - _ = db.get_collection("temp_temp_temp").drop() - destruct_coll = db.get_collection("test_temp") - destruct_coll.insert_one({}) - maybe_cursor = destruct_coll.rename(*args, **kwargs) - destruct_coll.drop() - elif h == db.validate_collection: - coll = db.get_collection("test") - coll.insert_one({}) - maybe_cursor = db.validate_collection(*args, **kwargs) - else: - coll.create_index("a") + try: maybe_cursor = h(*args, **kwargs) + except Exception: + maybe_cursor = None self.assertIn( "comment", inspect.signature(h).parameters, @@ -79,15 +59,11 @@ def _test_ops( ) if isinstance(maybe_cursor, CommandCursor): maybe_cursor.close() - tested = False - # For some reason collection.list_indexes creates two commands and the first - # one doesn't contain 'comment'. 
- for i in listener.started_events: - if cc == i.command.get("comment", ""): - self.assertEqual(cc, i.command["comment"]) - tested = True - self.assertTrue(tested) - if h not in [coll.aggregate_raw_batches]: + + cmd = listener.started_events[0] + self.assertEqual(cc, cmd.command.get("comment"), msg=cmd) + + if h.__name__ != "aggregate_raw_batches": self.assertIn( ":param comment:", h.__doc__, @@ -108,8 +84,8 @@ def _test_ops( @client_context.require_version_min(4, 7, -1) @client_context.require_replica_set def test_database_helpers(self): - listener = EventListener() - db = self.rs_or_single_client(event_listeners=[listener]).db + listener = OvertCommandListener() + db = (self.rs_or_single_client(event_listeners=[listener])).db helpers = [ (db.watch, []), (db.command, ["hello"]), @@ -120,12 +96,12 @@ def test_database_helpers(self): (db.dereference, [DBRef("collection", 1)]), ] already_supported = [db.command, db.list_collections, db.list_collection_names] - self._test_ops(helpers, already_supported, listener, db=db, coll=db.get_collection("test")) + self._test_ops(helpers, already_supported, listener) @client_context.require_version_min(4, 7, -1) @client_context.require_replica_set def test_client_helpers(self): - listener = EventListener() + listener = OvertCommandListener() cli = self.rs_or_single_client(event_listeners=[listener]) helpers = [ (cli.watch, []), @@ -140,8 +116,8 @@ def test_client_helpers(self): @client_context.require_version_min(4, 7, -1) def test_collection_helpers(self): - listener = EventListener() - db = self.rs_or_single_client(event_listeners=[listener])[self.db.name] + listener = OvertCommandListener() + db = (self.rs_or_single_client(event_listeners=[listener]))[self.db.name] coll = db.get_collection("test") helpers = [ @@ -176,7 +152,7 @@ def test_collection_helpers(self): coll.find_one_and_delete, coll.find_one_and_update, ] - self._test_ops(helpers, already_supported, listener, coll=coll, db=db) + self._test_ops(helpers, already_supported, listener) if __name__ == "__main__": diff --git a/tools/synchro.py b/tools/synchro.py index b6812e9be6..25f506ed5a 100644 --- a/tools/synchro.py +++ b/tools/synchro.py @@ -193,7 +193,9 @@ def async_only_test(f: str) -> bool: "test_collation.py", "test_collection.py", "test_command_logging.py", + "test_command_logging.py", "test_command_monitoring.py", + "test_comment.py", "test_common.py", "test_connection_logging.py", "test_connections_survive_primary_stepdown_spec.py", From 3c5e71a1cb28b695bc2eec4c3927ef6af56835a8 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Mon, 14 Oct 2024 07:32:38 -0500 Subject: [PATCH 028/182] PYTHON-4862 Fix handling of interrupt_loop in unified test runner (#1924) --- test/asynchronous/unified_format.py | 8 +++++++- test/unified_format.py | 8 +++++++- test/unified_format_shared.py | 5 ----- 3 files changed, 14 insertions(+), 7 deletions(-) diff --git a/test/asynchronous/unified_format.py b/test/asynchronous/unified_format.py index 4c37422951..42bda59cb2 100644 --- a/test/asynchronous/unified_format.py +++ b/test/asynchronous/unified_format.py @@ -36,7 +36,6 @@ unittest, ) from test.unified_format_shared import ( - IS_INTERRUPTED, KMS_TLS_OPTS, PLACEHOLDER_MAP, SKIP_CSOT_TESTS, @@ -104,6 +103,13 @@ _IS_SYNC = False +IS_INTERRUPTED = False + + +def interrupt_loop(): + global IS_INTERRUPTED + IS_INTERRUPTED = True + async def is_run_on_requirement_satisfied(requirement): topology_satisfied = True diff --git a/test/unified_format.py b/test/unified_format.py index 6a19082b86..13ab0af69b 100644 
--- a/test/unified_format.py +++ b/test/unified_format.py @@ -36,7 +36,6 @@ unittest, ) from test.unified_format_shared import ( - IS_INTERRUPTED, KMS_TLS_OPTS, PLACEHOLDER_MAP, SKIP_CSOT_TESTS, @@ -104,6 +103,13 @@ _IS_SYNC = True +IS_INTERRUPTED = False + + +def interrupt_loop(): + global IS_INTERRUPTED + IS_INTERRUPTED = True + def is_run_on_requirement_satisfied(requirement): topology_satisfied = True diff --git a/test/unified_format_shared.py b/test/unified_format_shared.py index d11624476d..f1b908a7a6 100644 --- a/test/unified_format_shared.py +++ b/test/unified_format_shared.py @@ -139,11 +139,6 @@ } -def interrupt_loop(): - global IS_INTERRUPTED - IS_INTERRUPTED = True - - def with_metaclass(meta, *bases): """Create a base class with a metaclass. From 9ba780cac256720be5c3c5051c7f8a19d27693d5 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Mon, 14 Oct 2024 07:34:01 -0500 Subject: [PATCH 029/182] PYTHON-4861 Ensure hatch is isolated in Evergreen (#1923) --- .evergreen/hatch.sh | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/.evergreen/hatch.sh b/.evergreen/hatch.sh index db0da2f4d0..8f862c39d2 100644 --- a/.evergreen/hatch.sh +++ b/.evergreen/hatch.sh @@ -18,17 +18,22 @@ if [ -n "$SKIP_HATCH" ]; then run_hatch() { bash ./.evergreen/run-tests.sh } -elif $PYTHON_BINARY -m hatch --version; then - run_hatch() { - $PYTHON_BINARY -m hatch run "$@" - } -else # No toolchain hatch present, set up virtualenv before installing hatch +else # Set up virtualenv before installing hatch # Use a random venv name because the encryption tasks run this script multiple times in the same run. ENV_NAME=hatchenv-$RANDOM createvirtualenv "$PYTHON_BINARY" $ENV_NAME # shellcheck disable=SC2064 trap "deactivate; rm -rf $ENV_NAME" EXIT HUP python -m pip install -q hatch + + # Ensure hatch does not write to user or global locations. + touch hatch_config.toml + HATCH_CONFIG=$(pwd)/hatch_config.toml + export HATCH_CONFIG + hatch config restore + hatch config set dirs.data ".hatch/data" + hatch config set dirs.cache ".hatch/cache" + run_hatch() { python -m hatch run "$@" } From 3cc722e9105d5818d57739d623d985d69b0eb626 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Mon, 14 Oct 2024 14:05:22 -0500 Subject: [PATCH 030/182] PYTHON-4838 Generate OCSP build variants using shrub.py (#1910) --- .evergreen/config.yml | 174 +++++++++++++++++++++----- .evergreen/scripts/generate_config.py | 167 ++++++++++++++++++++++++ 2 files changed, 308 insertions(+), 33 deletions(-) create mode 100644 .evergreen/scripts/generate_config.py diff --git a/.evergreen/config.yml b/.evergreen/config.yml index 1ef8751501..dee4b608ec 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -2826,42 +2826,150 @@ buildvariants: - "test-6.0-standalone" - "test-5.0-standalone" -- matrix_name: "ocsp-test" - matrix_spec: - platform: rhel8 - python-version: ["3.9", "3.10", "pypy3.9", "pypy3.10"] - mongodb-version: ["4.4", "5.0", "6.0", "7.0", "8.0", "latest"] - auth: "noauth" - ssl: "ssl" - display_name: "OCSP test ${platform} ${python-version} ${mongodb-version}" - batchtime: 20160 # 14 days +# OCSP test matrix. 
+- name: ocsp-test-rhel8-v4.4-py3.9 tasks: - - name: ".ocsp" - -- matrix_name: "ocsp-test-windows" - matrix_spec: - platform: windows - python-version-windows: ["3.9", "3.10"] - mongodb-version: ["4.4", "5.0", "6.0", "7.0", "8.0", "latest"] - auth: "noauth" - ssl: "ssl" - display_name: "OCSP test ${platform} ${python-version-windows} ${mongodb-version}" - batchtime: 20160 # 14 days + - name: .ocsp + display_name: OCSP test RHEL8 v4.4 py3.9 + run_on: + - rhel87-small + batchtime: 20160 + expansions: + AUTH: noauth + SSL: ssl + TOPOLOGY: server + VERSION: "4.4" + PYTHON_BINARY: /opt/python/3.9/bin/python3 +- name: ocsp-test-rhel8-v5.0-py3.10 tasks: - # Windows MongoDB servers do not staple OCSP responses and only support RSA. - - name: ".ocsp-rsa !.ocsp-staple" - -- matrix_name: "ocsp-test-macos" - matrix_spec: - platform: macos - mongodb-version: ["4.4", "5.0", "6.0", "7.0", "8.0", "latest"] - auth: "noauth" - ssl: "ssl" - display_name: "OCSP test ${platform} ${mongodb-version}" - batchtime: 20160 # 14 days + - name: .ocsp + display_name: OCSP test RHEL8 v5.0 py3.10 + run_on: + - rhel87-small + batchtime: 20160 + expansions: + AUTH: noauth + SSL: ssl + TOPOLOGY: server + VERSION: "5.0" + PYTHON_BINARY: /opt/python/3.10/bin/python3 +- name: ocsp-test-rhel8-v6.0-py3.11 tasks: - # macOS MongoDB servers do not staple OCSP responses and only support RSA. - - name: ".ocsp-rsa !.ocsp-staple" + - name: .ocsp + display_name: OCSP test RHEL8 v6.0 py3.11 + run_on: + - rhel87-small + batchtime: 20160 + expansions: + AUTH: noauth + SSL: ssl + TOPOLOGY: server + VERSION: "6.0" + PYTHON_BINARY: /opt/python/3.11/bin/python3 +- name: ocsp-test-rhel8-v7.0-py3.12 + tasks: + - name: .ocsp + display_name: OCSP test RHEL8 v7.0 py3.12 + run_on: + - rhel87-small + batchtime: 20160 + expansions: + AUTH: noauth + SSL: ssl + TOPOLOGY: server + VERSION: "7.0" + PYTHON_BINARY: /opt/python/3.12/bin/python3 +- name: ocsp-test-rhel8-v8.0-py3.13 + tasks: + - name: .ocsp + display_name: OCSP test RHEL8 v8.0 py3.13 + run_on: + - rhel87-small + batchtime: 20160 + expansions: + AUTH: noauth + SSL: ssl + TOPOLOGY: server + VERSION: "8.0" + PYTHON_BINARY: /opt/python/3.13/bin/python3 +- name: ocsp-test-rhel8-rapid-pypy3.9 + tasks: + - name: .ocsp + display_name: OCSP test RHEL8 rapid pypy3.9 + run_on: + - rhel87-small + batchtime: 20160 + expansions: + AUTH: noauth + SSL: ssl + TOPOLOGY: server + VERSION: rapid + PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 +- name: ocsp-test-rhel8-latest-pypy3.10 + tasks: + - name: .ocsp + display_name: OCSP test RHEL8 latest pypy3.10 + run_on: + - rhel87-small + batchtime: 20160 + expansions: + AUTH: noauth + SSL: ssl + TOPOLOGY: server + VERSION: latest + PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 +- name: ocsp-test-win64-v4.4-py3.9 + tasks: + - name: .ocsp-rsa !.ocsp-staple + display_name: OCSP test Win64 v4.4 py3.9 + run_on: + - windows-64-vsMulti-small + batchtime: 20160 + expansions: + AUTH: noauth + SSL: ssl + TOPOLOGY: server + VERSION: "4.4" + PYTHON_BINARY: C:/python/Python39/python.exe +- name: ocsp-test-win64-v8.0-py3.13 + tasks: + - name: .ocsp-rsa !.ocsp-staple + display_name: OCSP test Win64 v8.0 py3.13 + run_on: + - windows-64-vsMulti-small + batchtime: 20160 + expansions: + AUTH: noauth + SSL: ssl + TOPOLOGY: server + VERSION: "8.0" + PYTHON_BINARY: C:/python/Python313/python.exe +- name: ocsp-test-macos-v4.4-py3.9 + tasks: + - name: .ocsp-rsa !.ocsp-staple + display_name: OCSP test macOS v4.4 py3.9 + run_on: + - macos-14 + batchtime: 20160 + expansions: + AUTH: noauth + 
SSL: ssl + TOPOLOGY: server + VERSION: "4.4" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 +- name: ocsp-test-macos-v8.0-py3.13 + tasks: + - name: .ocsp-rsa !.ocsp-staple + display_name: OCSP test macOS v8.0 py3.13 + run_on: + - macos-14 + batchtime: 20160 + expansions: + AUTH: noauth + SSL: ssl + TOPOLOGY: server + VERSION: "8.0" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 - matrix_name: "oidc-auth-test" matrix_spec: diff --git a/.evergreen/scripts/generate_config.py b/.evergreen/scripts/generate_config.py new file mode 100644 index 0000000000..e98e527b72 --- /dev/null +++ b/.evergreen/scripts/generate_config.py @@ -0,0 +1,167 @@ +# /// script +# requires-python = ">=3.9" +# dependencies = [ +# "shrub.py>=3.2.0", +# "pyyaml>=6.0.2" +# ] +# /// + +# Note: Run this file with `hatch run`, `pipx run`, or `uv run`. +from __future__ import annotations + +from dataclasses import dataclass +from itertools import cycle, product, zip_longest +from typing import Any + +from shrub.v3.evg_build_variant import BuildVariant +from shrub.v3.evg_project import EvgProject +from shrub.v3.evg_task import EvgTaskRef +from shrub.v3.shrub_service import ShrubService + +############## +# Globals +############## + +ALL_VERSIONS = ["4.0", "4.4", "5.0", "6.0", "7.0", "8.0", "rapid", "latest"] +CPYTHONS = ["3.9", "3.10", "3.11", "3.12", "3.13"] +PYPYS = ["pypy3.9", "pypy3.10"] +ALL_PYTHONS = CPYTHONS + PYPYS +BATCHTIME_WEEK = 10080 +HOSTS = dict() + + +@dataclass +class Host: + name: str + run_on: str + display_name: str + + +HOSTS["rhel8"] = Host("rhel8", "rhel87-small", "RHEL8") +HOSTS["win64"] = Host("win64", "windows-64-vsMulti-small", "Win64") +HOSTS["macos"] = Host("macos", "macos-14", "macOS") + + +############## +# Helpers +############## + + +def create_variant( + task_names: list[str], + display_name: str, + *, + python: str | None = None, + version: str | None = None, + host: str | None = None, + **kwargs: Any, +) -> BuildVariant: + """Create a build variant for the given inputs.""" + task_refs = [EvgTaskRef(name=n) for n in task_names] + kwargs.setdefault("expansions", dict()) + expansions = kwargs.pop("expansions", dict()).copy() + host = host or "rhel8" + run_on = [HOSTS[host].run_on] + name = display_name.replace(" ", "-").lower() + if python: + expansions["PYTHON_BINARY"] = get_python_binary(python, host) + if version: + expansions["VERSION"] = version + expansions = expansions or None + return BuildVariant( + name=name, + display_name=display_name, + tasks=task_refs, + expansions=expansions, + run_on=run_on, + **kwargs, + ) + + +def get_python_binary(python: str, host: str) -> str: + """Get the appropriate python binary given a python version and host.""" + if host == "win64": + is_32 = python.startswith("32-bit") + if is_32: + _, python = python.split() + base = "C:/python/32" + else: + base = "C:/python" + python = python.replace(".", "") + return f"{base}/Python{python}/python.exe" + + if host == "rhel8": + return f"/opt/python/{python}/bin/python3" + + if host == "macos": + return f"/Library/Frameworks/Python.Framework/Versions/{python}/bin/python3" + + raise ValueError(f"no match found for python {python} on {host}") + + +def get_display_name(base: str, host: str, version: str, python: str) -> str: + """Get the display name of a variant.""" + if version not in ["rapid", "latest"]: + version = f"v{version}" + if not python.startswith("pypy"): + python = f"py{python}" + return f"{base} {HOSTS[host].display_name} {version} 
{python}" + + +def zip_cycle(*iterables, empty_default=None): + """Get all combinations of the inputs, cycling over the shorter list(s).""" + cycles = [cycle(i) for i in iterables] + for _ in zip_longest(*iterables): + yield tuple(next(i, empty_default) for i in cycles) + + +############## +# Variants +############## + + +def create_ocsp_variants() -> list[BuildVariant]: + variants = [] + batchtime = BATCHTIME_WEEK * 2 + expansions = dict(AUTH="noauth", SSL="ssl", TOPOLOGY="server") + base_display = "OCSP test" + + # OCSP tests on rhel8 with all servers v4.4+ and all python versions. + versions = [v for v in ALL_VERSIONS if v != "4.0"] + for version, python in zip_cycle(versions, ALL_PYTHONS): + host = "rhel8" + variant = create_variant( + [".ocsp"], + get_display_name(base_display, host, version, python), + python=python, + version=version, + host=host, + expansions=expansions, + batchtime=batchtime, + ) + variants.append(variant) + + # OCSP tests on Windows and MacOS. + # MongoDB servers on these hosts do not staple OCSP responses and only support RSA. + for host, version in product(["win64", "macos"], ["4.4", "8.0"]): + python = CPYTHONS[0] if version == "4.4" else CPYTHONS[-1] + variant = create_variant( + [".ocsp-rsa !.ocsp-staple"], + get_display_name(base_display, host, version, python), + python=python, + version=version, + host=host, + expansions=expansions, + batchtime=batchtime, + ) + variants.append(variant) + + return variants + + +################## +# Generate Config +################## + +project = EvgProject(tasks=None, buildvariants=create_ocsp_variants()) +print(ShrubService.generate_yaml(project)) # noqa: T201 From a911245bde1377c485f06dfd5373d159b7e8aff7 Mon Sep 17 00:00:00 2001 From: Shane Harvey Date: Mon, 14 Oct 2024 15:06:42 -0700 Subject: [PATCH 031/182] PYTHON-4866 Fix test_command_cursor_to_list_csot_applied (#1926) --- test/asynchronous/test_cursor.py | 14 ++++++-------- test/test_cursor.py | 14 ++++++-------- 2 files changed, 12 insertions(+), 16 deletions(-) diff --git a/test/asynchronous/test_cursor.py b/test/asynchronous/test_cursor.py index e79ad00641..ee0a757ed3 100644 --- a/test/asynchronous/test_cursor.py +++ b/test/asynchronous/test_cursor.py @@ -1412,12 +1412,11 @@ async def test_to_list_length(self): self.assertEqual(len(docs), 2) async def test_to_list_csot_applied(self): - client = await self.async_single_client(timeoutMS=500) + client = await self.async_single_client(timeoutMS=500, w=1) + coll = client.pymongo.test # Initialize the client with a larger timeout to help make test less flakey with pymongo.timeout(10): - await client.admin.command("ping") - coll = client.pymongo.test - await coll.insert_many([{} for _ in range(5)]) + await coll.insert_many([{} for _ in range(5)]) cursor = coll.find({"$where": delay(1)}) with self.assertRaises(PyMongoError) as ctx: await cursor.to_list() @@ -1454,12 +1453,11 @@ async def test_command_cursor_to_list_length(self): @async_client_context.require_failCommand_blockConnection async def test_command_cursor_to_list_csot_applied(self): - client = await self.async_single_client(timeoutMS=500) + client = await self.async_single_client(timeoutMS=500, w=1) + coll = client.pymongo.test # Initialize the client with a larger timeout to help make test less flakey with pymongo.timeout(10): - await client.admin.command("ping") - coll = client.pymongo.test - await coll.insert_many([{} for _ in range(5)]) + await coll.insert_many([{} for _ in range(5)]) fail_command = { "configureFailPoint": "failCommand", "mode": {"times": 
5}, diff --git a/test/test_cursor.py b/test/test_cursor.py index 7c073bf351..7a6dfc9429 100644 --- a/test/test_cursor.py +++ b/test/test_cursor.py @@ -1403,12 +1403,11 @@ def test_to_list_length(self): self.assertEqual(len(docs), 2) def test_to_list_csot_applied(self): - client = self.single_client(timeoutMS=500) + client = self.single_client(timeoutMS=500, w=1) + coll = client.pymongo.test # Initialize the client with a larger timeout to help make test less flakey with pymongo.timeout(10): - client.admin.command("ping") - coll = client.pymongo.test - coll.insert_many([{} for _ in range(5)]) + coll.insert_many([{} for _ in range(5)]) cursor = coll.find({"$where": delay(1)}) with self.assertRaises(PyMongoError) as ctx: cursor.to_list() @@ -1445,12 +1444,11 @@ def test_command_cursor_to_list_length(self): @client_context.require_failCommand_blockConnection def test_command_cursor_to_list_csot_applied(self): - client = self.single_client(timeoutMS=500) + client = self.single_client(timeoutMS=500, w=1) + coll = client.pymongo.test # Initialize the client with a larger timeout to help make test less flakey with pymongo.timeout(10): - client.admin.command("ping") - coll = client.pymongo.test - coll.insert_many([{} for _ in range(5)]) + coll.insert_many([{} for _ in range(5)]) fail_command = { "configureFailPoint": "failCommand", "mode": {"times": 5}, From 9e38c54fa03d0f719a43ff023894c2a1ad9b5480 Mon Sep 17 00:00:00 2001 From: Shane Harvey Date: Mon, 14 Oct 2024 15:25:21 -0700 Subject: [PATCH 032/182] PYTHON-4861 Fix HATCH_CONFIG on cygwin (#1927) --- .evergreen/hatch.sh | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.evergreen/hatch.sh b/.evergreen/hatch.sh index 8f862c39d2..6f3d36b389 100644 --- a/.evergreen/hatch.sh +++ b/.evergreen/hatch.sh @@ -29,6 +29,9 @@ else # Set up virtualenv before installing hatch # Ensure hatch does not write to user or global locations. 
touch hatch_config.toml HATCH_CONFIG=$(pwd)/hatch_config.toml + if [ "Windows_NT" = "$OS" ]; then # Magic variable in cygwin + HATCH_CONFIG=$(cygpath -m "$HATCH_CONFIG") + fi export HATCH_CONFIG hatch config restore hatch config set dirs.data ".hatch/data" From 872fda179e247fb8e1bcc3cf2af3d892788a2e2f Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Tue, 15 Oct 2024 08:54:42 -0400 Subject: [PATCH 033/182] PYTHON-4574 - FaaS detection logic mistakenly identifies EKS as AWS Lambda (#1908) --- test/asynchronous/test_client.py | 16 ++++++++++++++++ test/test_client.py | 16 ++++++++++++++++ 2 files changed, 32 insertions(+) diff --git a/test/asynchronous/test_client.py b/test/asynchronous/test_client.py index faa23348c9..c6b6416c16 100644 --- a/test/asynchronous/test_client.py +++ b/test/asynchronous/test_client.py @@ -2019,6 +2019,22 @@ async def test_handshake_08_invalid_aws_ec2(self): None, ) + async def test_handshake_09_container_with_provider(self): + await self._test_handshake( + { + ENV_VAR_K8S: "1", + "AWS_LAMBDA_RUNTIME_API": "1", + "AWS_REGION": "us-east-1", + "AWS_LAMBDA_FUNCTION_MEMORY_SIZE": "256", + }, + { + "container": {"orchestrator": "kubernetes"}, + "name": "aws.lambda", + "region": "us-east-1", + "memory_mb": 256, + }, + ) + def test_dict_hints(self): self.db.t.find(hint={"x": 1}) diff --git a/test/test_client.py b/test/test_client.py index be1994dd93..8e3d9c8b8b 100644 --- a/test/test_client.py +++ b/test/test_client.py @@ -1977,6 +1977,22 @@ def test_handshake_08_invalid_aws_ec2(self): None, ) + def test_handshake_09_container_with_provider(self): + self._test_handshake( + { + ENV_VAR_K8S: "1", + "AWS_LAMBDA_RUNTIME_API": "1", + "AWS_REGION": "us-east-1", + "AWS_LAMBDA_FUNCTION_MEMORY_SIZE": "256", + }, + { + "container": {"orchestrator": "kubernetes"}, + "name": "aws.lambda", + "region": "us-east-1", + "memory_mb": 256, + }, + ) + def test_dict_hints(self): self.db.t.find(hint={"x": 1}) From 710bc40c730d2fd982e1cb7a41fd91ac7b5d4498 Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Tue, 15 Oct 2024 12:12:18 -0400 Subject: [PATCH 034/182] =?UTF-8?q?PYTHON-4870=20-=20MongoClient.address?= =?UTF-8?q?=20should=20block=20until=20a=20connection=20suc=E2=80=A6=20(#1?= =?UTF-8?q?929)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- pymongo/asynchronous/mongo_client.py | 7 ------- pymongo/synchronous/mongo_client.py | 7 ------- test/asynchronous/test_client.py | 2 -- test/test_client.py | 2 -- test/test_replica_set_reconfig.py | 3 ++- 5 files changed, 2 insertions(+), 19 deletions(-) diff --git a/pymongo/asynchronous/mongo_client.py b/pymongo/asynchronous/mongo_client.py index bfae302dac..4e09efe401 100644 --- a/pymongo/asynchronous/mongo_client.py +++ b/pymongo/asynchronous/mongo_client.py @@ -1453,13 +1453,6 @@ async def address(self) -> Optional[tuple[str, int]]: 'Cannot use "address" property when load balancing among' ' mongoses, use "nodes" instead.' ) - if topology_type not in ( - TOPOLOGY_TYPE.ReplicaSetWithPrimary, - TOPOLOGY_TYPE.Single, - TOPOLOGY_TYPE.LoadBalanced, - TOPOLOGY_TYPE.Sharded, - ): - return None return await self._server_property("address") @property diff --git a/pymongo/synchronous/mongo_client.py b/pymongo/synchronous/mongo_client.py index 1351cb200f..815446bb2c 100644 --- a/pymongo/synchronous/mongo_client.py +++ b/pymongo/synchronous/mongo_client.py @@ -1447,13 +1447,6 @@ def address(self) -> Optional[tuple[str, int]]: 'Cannot use "address" property when load balancing among' ' mongoses, use "nodes" instead.' 
) - if topology_type not in ( - TOPOLOGY_TYPE.ReplicaSetWithPrimary, - TOPOLOGY_TYPE.Single, - TOPOLOGY_TYPE.LoadBalanced, - TOPOLOGY_TYPE.Sharded, - ): - return None return self._server_property("address") @property diff --git a/test/asynchronous/test_client.py b/test/asynchronous/test_client.py index c6b6416c16..590154b857 100644 --- a/test/asynchronous/test_client.py +++ b/test/asynchronous/test_client.py @@ -838,8 +838,6 @@ async def test_init_disconnected(self): c = await self.async_rs_or_single_client(connect=False) self.assertIsInstance(c.topology_description, TopologyDescription) self.assertEqual(c.topology_description, c._topology._description) - self.assertIsNone(await c.address) # PYTHON-2981 - await c.admin.command("ping") # connect if async_client_context.is_rs: # The primary's host and port are from the replica set config. self.assertIsNotNone(await c.address) diff --git a/test/test_client.py b/test/test_client.py index 8e3d9c8b8b..5bbb5bd751 100644 --- a/test/test_client.py +++ b/test/test_client.py @@ -812,8 +812,6 @@ def test_init_disconnected(self): c = self.rs_or_single_client(connect=False) self.assertIsInstance(c.topology_description, TopologyDescription) self.assertEqual(c.topology_description, c._topology._description) - self.assertIsNone(c.address) # PYTHON-2981 - c.admin.command("ping") # connect if client_context.is_rs: # The primary's host and port are from the replica set config. self.assertIsNotNone(c.address) diff --git a/test/test_replica_set_reconfig.py b/test/test_replica_set_reconfig.py index 1dae0aea86..4c23d71b69 100644 --- a/test/test_replica_set_reconfig.py +++ b/test/test_replica_set_reconfig.py @@ -59,7 +59,8 @@ def test_client(self): with self.assertRaises(ServerSelectionTimeoutError): c.db.command("ping") - self.assertEqual(c.address, None) + with self.assertRaises(ServerSelectionTimeoutError): + _ = c.address # Client can still discover the primary node c.revive_host("a:1") From 82e673d6602b968823768d7c99bb5a676c00eb08 Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Tue, 15 Oct 2024 14:16:19 -0400 Subject: [PATCH 035/182] PYTHON-4870 - Update changelog for MongoClient.address fix (#1931) --- doc/changelog.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/doc/changelog.rst b/doc/changelog.rst index e7b160b176..44d6fc9a57 100644 --- a/doc/changelog.rst +++ b/doc/changelog.rst @@ -14,6 +14,9 @@ PyMongo 4.11 brings a number of changes including: - Dropped support for MongoDB 3.6. - Added support for free-threaded Python with the GIL disabled. For more information see: `Free-threaded CPython `_. +- :attr:`~pymongo.asynchronous.mongo_client.AsyncMongoClient.address` and + :attr:`~pymongo.mongo_client.MongoClient.address` now correctly block when called on unconnected clients + until either connection succeeds or a server selection timeout error is raised. Issues Resolved ............... 
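The ``address`` change recorded above is easiest to see on a client that cannot reach any server: instead of returning ``None`` from an unconnected client (the old PYTHON-2981 behavior removed in the diff), the property now blocks during server selection and surfaces the timeout, matching the updated ``test_replica_set_reconfig`` expectation. A minimal sketch of the new behavior, assuming an unreachable host and a short ``serverSelectionTimeoutMS`` (both values below are illustrative and not taken from the patch):

    from pymongo import MongoClient
    from pymongo.errors import ServerSelectionTimeoutError

    # Lazily-connecting client pointed at a host with no mongod running.
    client = MongoClient(
        "mongodb://unreachable.invalid:27017",
        connect=False,
        serverSelectionTimeoutMS=500,
    )
    try:
        # Blocks until a server is selected or the timeout elapses.
        print(client.address)
    except ServerSelectionTimeoutError:
        print("no server available within the timeout")
    finally:
        client.close()
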
From 1b6c0d3a2a7b82f9526f71d5583d11e7674d3c54 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Tue, 15 Oct 2024 13:33:04 -0500 Subject: [PATCH 036/182] PYTHON-4868 Generate server tests using shrub.py (#1930) --- .evergreen/config.yml | 548 ++++++++++++++++++++++---- .evergreen/hatch.sh | 4 +- .evergreen/run-tests.sh | 2 +- .evergreen/scripts/generate_config.py | 121 +++++- 4 files changed, 583 insertions(+), 92 deletions(-) diff --git a/.evergreen/config.yml b/.evergreen/config.yml index dee4b608ec..c3427e66d0 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -409,6 +409,7 @@ functions: AUTH=${AUTH} \ SSL=${SSL} \ TEST_DATA_LAKE=${TEST_DATA_LAKE} \ + TEST_SUITES=${TEST_SUITES} \ MONGODB_API_VERSION=${MONGODB_API_VERSION} \ SKIP_HATCH=${SKIP_HATCH} \ bash ${PROJECT_DIRECTORY}/.evergreen/hatch.sh test:test-eg @@ -2399,6 +2400,470 @@ axes: batchtime: 10080 # 7 days buildvariants: +# Server Tests for RHEL8. +- name: test-rhel8-py3.9-auth-ssl-cov + tasks: + - name: .standalone + - name: .replica_set + - name: .sharded_cluster + display_name: Test RHEL8 py3.9 Auth SSL cov + run_on: + - rhel87-small + expansions: + AUTH: auth + SSL: ssl + COVERAGE: coverage + PYTHON_BINARY: /opt/python/3.9/bin/python3 + tags: [coverage_tag] +- name: test-rhel8-py3.9-noauth-ssl-cov + tasks: + - name: .standalone + - name: .replica_set + - name: .sharded_cluster + display_name: Test RHEL8 py3.9 NoAuth SSL cov + run_on: + - rhel87-small + expansions: + AUTH: noauth + SSL: ssl + COVERAGE: coverage + PYTHON_BINARY: /opt/python/3.9/bin/python3 + tags: [coverage_tag] +- name: test-rhel8-py3.9-noauth-nossl-cov + tasks: + - name: .standalone + - name: .replica_set + - name: .sharded_cluster + display_name: Test RHEL8 py3.9 NoAuth NoSSL cov + run_on: + - rhel87-small + expansions: + AUTH: noauth + SSL: nossl + COVERAGE: coverage + PYTHON_BINARY: /opt/python/3.9/bin/python3 + tags: [coverage_tag] +- name: test-rhel8-py3.13-auth-ssl-cov + tasks: + - name: .standalone + - name: .replica_set + - name: .sharded_cluster + display_name: Test RHEL8 py3.13 Auth SSL cov + run_on: + - rhel87-small + expansions: + AUTH: auth + SSL: ssl + COVERAGE: coverage + PYTHON_BINARY: /opt/python/3.13/bin/python3 + tags: [coverage_tag] +- name: test-rhel8-py3.13-noauth-ssl-cov + tasks: + - name: .standalone + - name: .replica_set + - name: .sharded_cluster + display_name: Test RHEL8 py3.13 NoAuth SSL cov + run_on: + - rhel87-small + expansions: + AUTH: noauth + SSL: ssl + COVERAGE: coverage + PYTHON_BINARY: /opt/python/3.13/bin/python3 + tags: [coverage_tag] +- name: test-rhel8-py3.13-noauth-nossl-cov + tasks: + - name: .standalone + - name: .replica_set + - name: .sharded_cluster + display_name: Test RHEL8 py3.13 NoAuth NoSSL cov + run_on: + - rhel87-small + expansions: + AUTH: noauth + SSL: nossl + COVERAGE: coverage + PYTHON_BINARY: /opt/python/3.13/bin/python3 + tags: [coverage_tag] +- name: test-rhel8-pypy3.10-auth-ssl-cov + tasks: + - name: .standalone + - name: .replica_set + - name: .sharded_cluster + display_name: Test RHEL8 pypy3.10 Auth SSL cov + run_on: + - rhel87-small + expansions: + AUTH: auth + SSL: ssl + COVERAGE: coverage + PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 + tags: [coverage_tag] +- name: test-rhel8-pypy3.10-noauth-ssl-cov + tasks: + - name: .standalone + - name: .replica_set + - name: .sharded_cluster + display_name: Test RHEL8 pypy3.10 NoAuth SSL cov + run_on: + - rhel87-small + expansions: + AUTH: noauth + SSL: ssl + COVERAGE: coverage + PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 + 
tags: [coverage_tag] +- name: test-rhel8-pypy3.10-noauth-nossl-cov + tasks: + - name: .standalone + - name: .replica_set + - name: .sharded_cluster + display_name: Test RHEL8 pypy3.10 NoAuth NoSSL cov + run_on: + - rhel87-small + expansions: + AUTH: noauth + SSL: nossl + COVERAGE: coverage + PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 + tags: [coverage_tag] +- name: test-rhel8-py3.10-auth-ssl + tasks: + - name: .standalone + display_name: Test RHEL8 py3.10 Auth SSL + run_on: + - rhel87-small + expansions: + AUTH: auth + SSL: ssl + PYTHON_BINARY: /opt/python/3.10/bin/python3 +- name: test-rhel8-py3.11-noauth-ssl + tasks: + - name: .replica_set + display_name: Test RHEL8 py3.11 NoAuth SSL + run_on: + - rhel87-small + expansions: + AUTH: noauth + SSL: ssl + PYTHON_BINARY: /opt/python/3.11/bin/python3 +- name: test-rhel8-py3.12-noauth-nossl + tasks: + - name: .sharded_cluster + display_name: Test RHEL8 py3.12 NoAuth NoSSL + run_on: + - rhel87-small + expansions: + AUTH: noauth + SSL: nossl + PYTHON_BINARY: /opt/python/3.12/bin/python3 +- name: test-rhel8-pypy3.9-auth-ssl + tasks: + - name: .standalone + display_name: Test RHEL8 pypy3.9 Auth SSL + run_on: + - rhel87-small + expansions: + AUTH: auth + SSL: ssl + PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 + +# Server tests for MacOS. +- name: test-macos-py3.9-auth-ssl-sync + tasks: + - name: .standalone + display_name: Test macOS py3.9 Auth SSL Sync + run_on: + - macos-14 + expansions: + AUTH: auth + SSL: ssl + TEST_SUITES: default + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 + SKIP_CSOT_TESTS: "true" +- name: test-macos-py3.9-auth-ssl-async + tasks: + - name: .standalone + display_name: Test macOS py3.9 Auth SSL Async + run_on: + - macos-14 + expansions: + AUTH: auth + SSL: ssl + TEST_SUITES: default_async + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 + SKIP_CSOT_TESTS: "true" +- name: test-macos-py3.13-noauth-ssl-sync + tasks: + - name: .replica_set + display_name: Test macOS py3.13 NoAuth SSL Sync + run_on: + - macos-14 + expansions: + AUTH: noauth + SSL: ssl + TEST_SUITES: default + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 + SKIP_CSOT_TESTS: "true" +- name: test-macos-py3.13-noauth-ssl-async + tasks: + - name: .replica_set + display_name: Test macOS py3.13 NoAuth SSL Async + run_on: + - macos-14 + expansions: + AUTH: noauth + SSL: ssl + TEST_SUITES: default_async + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 + SKIP_CSOT_TESTS: "true" +- name: test-macos-py3.9-noauth-nossl-sync + tasks: + - name: .sharded_cluster + display_name: Test macOS py3.9 NoAuth NoSSL Sync + run_on: + - macos-14 + expansions: + AUTH: noauth + SSL: nossl + TEST_SUITES: default + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 + SKIP_CSOT_TESTS: "true" +- name: test-macos-py3.9-noauth-nossl-async + tasks: + - name: .sharded_cluster + display_name: Test macOS py3.9 NoAuth NoSSL Async + run_on: + - macos-14 + expansions: + AUTH: noauth + SSL: nossl + TEST_SUITES: default_async + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 + SKIP_CSOT_TESTS: "true" + +# Server tests for macOS Arm64. 
+- name: test-macos-arm64-py3.9-auth-ssl-sync + tasks: + - name: .standalone + display_name: Test macOS Arm64 py3.9 Auth SSL Sync + run_on: + - macos-14-arm64 + expansions: + AUTH: auth + SSL: ssl + TEST_SUITES: default + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 + SKIP_CSOT_TESTS: "true" +- name: test-macos-arm64-py3.9-auth-ssl-async + tasks: + - name: .standalone + display_name: Test macOS Arm64 py3.9 Auth SSL Async + run_on: + - macos-14-arm64 + expansions: + AUTH: auth + SSL: ssl + TEST_SUITES: default_async + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 + SKIP_CSOT_TESTS: "true" +- name: test-macos-arm64-py3.13-noauth-ssl-sync + tasks: + - name: .replica_set + display_name: Test macOS Arm64 py3.13 NoAuth SSL Sync + run_on: + - macos-14-arm64 + expansions: + AUTH: noauth + SSL: ssl + TEST_SUITES: default + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 + SKIP_CSOT_TESTS: "true" +- name: test-macos-arm64-py3.13-noauth-ssl-async + tasks: + - name: .replica_set + display_name: Test macOS Arm64 py3.13 NoAuth SSL Async + run_on: + - macos-14-arm64 + expansions: + AUTH: noauth + SSL: ssl + TEST_SUITES: default_async + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 + SKIP_CSOT_TESTS: "true" +- name: test-macos-arm64-py3.9-noauth-nossl-sync + tasks: + - name: .sharded_cluster + display_name: Test macOS Arm64 py3.9 NoAuth NoSSL Sync + run_on: + - macos-14-arm64 + expansions: + AUTH: noauth + SSL: nossl + TEST_SUITES: default + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 + SKIP_CSOT_TESTS: "true" +- name: test-macos-arm64-py3.9-noauth-nossl-async + tasks: + - name: .sharded_cluster + display_name: Test macOS Arm64 py3.9 NoAuth NoSSL Async + run_on: + - macos-14-arm64 + expansions: + AUTH: noauth + SSL: nossl + TEST_SUITES: default_async + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 + SKIP_CSOT_TESTS: "true" + +# Server tests for Windows. 
+- name: test-win64-py3.9-auth-ssl-sync + tasks: + - name: .standalone + display_name: Test Win64 py3.9 Auth SSL Sync + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: auth + SSL: ssl + TEST_SUITES: default + PYTHON_BINARY: C:/python/Python39/python.exe + SKIP_CSOT_TESTS: "true" +- name: test-win64-py3.9-auth-ssl-async + tasks: + - name: .standalone + display_name: Test Win64 py3.9 Auth SSL Async + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: auth + SSL: ssl + TEST_SUITES: default_async + PYTHON_BINARY: C:/python/Python39/python.exe + SKIP_CSOT_TESTS: "true" +- name: test-win64-py3.13-noauth-ssl-sync + tasks: + - name: .replica_set + display_name: Test Win64 py3.13 NoAuth SSL Sync + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: noauth + SSL: ssl + TEST_SUITES: default + PYTHON_BINARY: C:/python/Python313/python.exe + SKIP_CSOT_TESTS: "true" +- name: test-win64-py3.13-noauth-ssl-async + tasks: + - name: .replica_set + display_name: Test Win64 py3.13 NoAuth SSL Async + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: noauth + SSL: ssl + TEST_SUITES: default_async + PYTHON_BINARY: C:/python/Python313/python.exe + SKIP_CSOT_TESTS: "true" +- name: test-win64-py3.9-noauth-nossl-sync + tasks: + - name: .sharded_cluster + display_name: Test Win64 py3.9 NoAuth NoSSL Sync + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: noauth + SSL: nossl + TEST_SUITES: default + PYTHON_BINARY: C:/python/Python39/python.exe + SKIP_CSOT_TESTS: "true" +- name: test-win64-py3.9-noauth-nossl-async + tasks: + - name: .sharded_cluster + display_name: Test Win64 py3.9 NoAuth NoSSL Async + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: noauth + SSL: nossl + TEST_SUITES: default_async + PYTHON_BINARY: C:/python/Python39/python.exe + SKIP_CSOT_TESTS: "true" +- name: test-win32-py3.9-auth-ssl-sync + tasks: + - name: .standalone + display_name: Test Win32 py3.9 Auth SSL Sync + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: auth + SSL: ssl + TEST_SUITES: default + PYTHON_BINARY: C:/python/32/Python39/python.exe + SKIP_CSOT_TESTS: "true" + +# Server tests for Win32. 
+- name: test-win32-py3.9-auth-ssl-async + tasks: + - name: .standalone + display_name: Test Win32 py3.9 Auth SSL Async + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: auth + SSL: ssl + TEST_SUITES: default_async + PYTHON_BINARY: C:/python/32/Python39/python.exe + SKIP_CSOT_TESTS: "true" +- name: test-win32-py3.13-noauth-ssl-sync + tasks: + - name: .replica_set + display_name: Test Win32 py3.13 NoAuth SSL Sync + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: noauth + SSL: ssl + TEST_SUITES: default + PYTHON_BINARY: C:/python/32/Python313/python.exe + SKIP_CSOT_TESTS: "true" +- name: test-win32-py3.13-noauth-ssl-async + tasks: + - name: .replica_set + display_name: Test Win32 py3.13 NoAuth SSL Async + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: noauth + SSL: ssl + TEST_SUITES: default_async + PYTHON_BINARY: C:/python/32/Python313/python.exe + SKIP_CSOT_TESTS: "true" +- name: test-win32-py3.9-noauth-nossl-sync + tasks: + - name: .sharded_cluster + display_name: Test Win32 py3.9 NoAuth NoSSL Sync + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: noauth + SSL: nossl + TEST_SUITES: default + PYTHON_BINARY: C:/python/32/Python39/python.exe + SKIP_CSOT_TESTS: "true" +- name: test-win32-py3.9-noauth-nossl-async + tasks: + - name: .sharded_cluster + display_name: Test Win32 py3.9 NoAuth NoSSL Async + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: noauth + SSL: nossl + TEST_SUITES: default_async + PYTHON_BINARY: C:/python/32/Python39/python.exe + SKIP_CSOT_TESTS: "true" + - matrix_name: "tests-fips" matrix_spec: platform: @@ -2409,44 +2874,6 @@ buildvariants: tasks: - "test-fips-standalone" -- matrix_name: "test-macos" - matrix_spec: - platform: - # MacOS introduced SSL support with MongoDB >= 3.2. - # Older server versions (2.6, 3.0) are supported without SSL. - - macos - auth: "*" - ssl: "*" - exclude_spec: - # No point testing with SSL without auth. 
- - platform: macos - auth: "noauth" - ssl: "ssl" - display_name: "${platform} ${auth} ${ssl}" - tasks: - - ".latest" - - ".8.0" - - ".7.0" - - ".6.0" - - ".5.0" - - ".4.4" - - ".4.2" - - ".4.0" - -- matrix_name: "test-macos-arm64" - matrix_spec: - platform: - - macos-arm64 - auth-ssl: "*" - display_name: "${platform} ${auth-ssl}" - tasks: - - ".latest" - - ".8.0" - - ".7.0" - - ".6.0" - - ".5.0" - - ".4.4" - - matrix_name: "test-macos-encryption" matrix_spec: platform: @@ -2486,24 +2913,6 @@ buildvariants: tasks: - ".6.0" -- matrix_name: "tests-python-version-rhel8-test-ssl" - matrix_spec: - platform: rhel8 - python-version: "*" - auth-ssl: "*" - coverage: "*" - display_name: "${python-version} ${platform} ${auth-ssl} ${coverage}" - tasks: &all-server-versions - - ".rapid" - - ".latest" - - ".8.0" - - ".7.0" - - ".6.0" - - ".5.0" - - ".4.4" - - ".4.2" - - ".4.0" - - matrix_name: "tests-pyopenssl" matrix_spec: platform: rhel8 @@ -2580,7 +2989,16 @@ buildvariants: auth-ssl: "*" coverage: "*" display_name: "${c-extensions} ${python-version} ${platform} ${auth} ${ssl} ${coverage}" - tasks: *all-server-versions + tasks: &all-server-versions + - ".rapid" + - ".latest" + - ".8.0" + - ".7.0" + - ".6.0" + - ".5.0" + - ".4.4" + - ".4.2" + - ".4.0" - matrix_name: "tests-python-version-rhel8-compression" matrix_spec: @@ -2629,22 +3047,6 @@ buildvariants: display_name: "${green-framework} ${python-version} ${platform} ${auth-ssl}" tasks: *all-server-versions -- matrix_name: "tests-windows-python-version" - matrix_spec: - platform: windows - python-version-windows: "*" - auth-ssl: "*" - display_name: "${platform} ${python-version-windows} ${auth-ssl}" - tasks: *all-server-versions - -- matrix_name: "tests-windows-python-version-32-bit" - matrix_spec: - platform: windows - python-version-windows-32: "*" - auth-ssl: "*" - display_name: "${platform} ${python-version-windows-32} ${auth-ssl}" - tasks: *all-server-versions - - matrix_name: "tests-python-version-supports-openssl-102-test-ssl" matrix_spec: platform: rhel7 diff --git a/.evergreen/hatch.sh b/.evergreen/hatch.sh index 6f3d36b389..45d5113cd6 100644 --- a/.evergreen/hatch.sh +++ b/.evergreen/hatch.sh @@ -34,8 +34,8 @@ else # Set up virtualenv before installing hatch fi export HATCH_CONFIG hatch config restore - hatch config set dirs.data ".hatch/data" - hatch config set dirs.cache ".hatch/cache" + hatch config set dirs.data "$(pwd)/.hatch/data" + hatch config set dirs.cache "$(pwd)/.hatch/cache" run_hatch() { python -m hatch run "$@" diff --git a/.evergreen/run-tests.sh b/.evergreen/run-tests.sh index 5e8429dd28..364570999f 100755 --- a/.evergreen/run-tests.sh +++ b/.evergreen/run-tests.sh @@ -30,7 +30,7 @@ set -o xtrace AUTH=${AUTH:-noauth} SSL=${SSL:-nossl} -TEST_SUITES="" +TEST_SUITES=${TEST_SUITES:-} TEST_ARGS="${*:1}" export PIP_QUIET=1 # Quiet by default diff --git a/.evergreen/scripts/generate_config.py b/.evergreen/scripts/generate_config.py index e98e527b72..044303ad8f 100644 --- a/.evergreen/scripts/generate_config.py +++ b/.evergreen/scripts/generate_config.py @@ -26,7 +26,17 @@ CPYTHONS = ["3.9", "3.10", "3.11", "3.12", "3.13"] PYPYS = ["pypy3.9", "pypy3.10"] ALL_PYTHONS = CPYTHONS + PYPYS +MIN_MAX_PYTHON = [CPYTHONS[0], CPYTHONS[-1]] BATCHTIME_WEEK = 10080 +AUTH_SSLS = [("auth", "ssl"), ("noauth", "ssl"), ("noauth", "nossl")] +TOPOLOGIES = ["standalone", "replica_set", "sharded_cluster"] +SYNCS = ["sync", "async"] +DISPLAY_LOOKUP = dict( + ssl=dict(ssl="SSL", nossl="NoSSL"), + auth=dict(auth="Auth", noauth="NoAuth"), + 
test_suites=dict(default="Sync", default_async="Async"), + coverage=dict(coverage="cov"), +) HOSTS = dict() @@ -35,11 +45,18 @@ class Host: name: str run_on: str display_name: str + expansions: dict[str, str] -HOSTS["rhel8"] = Host("rhel8", "rhel87-small", "RHEL8") -HOSTS["win64"] = Host("win64", "windows-64-vsMulti-small", "Win64") -HOSTS["macos"] = Host("macos", "macos-14", "macOS") +_macos_expansions = dict( # CSOT tests are unreliable on slow hosts. + SKIP_CSOT_TESTS="true" +) + +HOSTS["rhel8"] = Host("rhel8", "rhel87-small", "RHEL8", dict()) +HOSTS["win64"] = Host("win64", "windows-64-vsMulti-small", "Win64", _macos_expansions) +HOSTS["win32"] = Host("win32", "windows-64-vsMulti-small", "Win32", _macos_expansions) +HOSTS["macos"] = Host("macos", "macos-14", "macOS", _macos_expansions) +HOSTS["macos-arm64"] = Host("macos-arm64", "macos-14-arm64", "macOS Arm64", _macos_expansions) ############## @@ -67,6 +84,7 @@ def create_variant( expansions["PYTHON_BINARY"] = get_python_binary(python, host) if version: expansions["VERSION"] = version + expansions.update(HOSTS[host].expansions) expansions = expansions or None return BuildVariant( name=name, @@ -80,10 +98,8 @@ def create_variant( def get_python_binary(python: str, host: str) -> str: """Get the appropriate python binary given a python version and host.""" - if host == "win64": - is_32 = python.startswith("32-bit") - if is_32: - _, python = python.split() + if host in ["win64", "win32"]: + if host == "win32": base = "C:/python/32" else: base = "C:/python" @@ -93,19 +109,29 @@ def get_python_binary(python: str, host: str) -> str: if host == "rhel8": return f"/opt/python/{python}/bin/python3" - if host == "macos": + if host in ["macos", "macos-arm64"]: return f"/Library/Frameworks/Python.Framework/Versions/{python}/bin/python3" raise ValueError(f"no match found for python {python} on {host}") -def get_display_name(base: str, host: str, version: str, python: str) -> str: +def get_display_name(base: str, host: str, **kwargs) -> str: """Get the display name of a variant.""" - if version not in ["rapid", "latest"]: - version = f"v{version}" - if not python.startswith("pypy"): - python = f"py{python}" - return f"{base} {HOSTS[host].display_name} {version} {python}" + display_name = f"{base} {HOSTS[host].display_name}" + for key, value in kwargs.items(): + name = value + if key == "version": + if value not in ["rapid", "latest"]: + name = f"v{value}" + elif key == "python": + if not value.startswith("pypy"): + name = f"py{value}" + elif key.lower() in DISPLAY_LOOKUP: + name = DISPLAY_LOOKUP[key.lower()][value] + else: + raise ValueError(f"Missing display handling for {key}") + display_name = f"{display_name} {name}" + return display_name def zip_cycle(*iterables, empty_default=None): @@ -115,6 +141,15 @@ def zip_cycle(*iterables, empty_default=None): yield tuple(next(i, empty_default) for i in cycles) +def generate_yaml(tasks=None, variants=None): + """Generate the yaml for a given set of tasks and variants.""" + project = EvgProject(tasks=tasks, buildvariants=variants) + out = ShrubService.generate_yaml(project) + # Dedent by two spaces to match what we use in config.yml + lines = [line[2:] for line in out.splitlines()] + print("\n".join(lines)) # noqa: T201 + + ############## # Variants ############## @@ -159,9 +194,63 @@ def create_ocsp_variants() -> list[BuildVariant]: return variants +def create_server_variants() -> list[BuildVariant]: + variants = [] + + # Run the full matrix on linux with min and max CPython, and latest pypy. 
+ host = "rhel8" + for python, (auth, ssl) in product([*MIN_MAX_PYTHON, PYPYS[-1]], AUTH_SSLS): + display_name = f"Test {host}" + expansions = dict(AUTH=auth, SSL=ssl, COVERAGE="coverage") + display_name = get_display_name("Test", host, python=python, **expansions) + variant = create_variant( + [f".{t}" for t in TOPOLOGIES], + display_name, + python=python, + host=host, + tags=["coverage_tag"], + expansions=expansions, + ) + variants.append(variant) + + # Test the rest of the pythons on linux. + for python, (auth, ssl), topology in zip_cycle( + CPYTHONS[1:-1] + PYPYS[:-1], AUTH_SSLS, TOPOLOGIES + ): + display_name = f"Test {host}" + expansions = dict(AUTH=auth, SSL=ssl) + display_name = get_display_name("Test", host, python=python, **expansions) + variant = create_variant( + [f".{topology}"], + display_name, + python=python, + host=host, + expansions=expansions, + ) + variants.append(variant) + + # Test a subset on each of the other platforms. + for host in ("macos", "macos-arm64", "win64", "win32"): + for (python, (auth, ssl), topology), sync in product( + zip_cycle(MIN_MAX_PYTHON, AUTH_SSLS, TOPOLOGIES), SYNCS + ): + test_suite = "default" if sync == "sync" else "default_async" + expansions = dict(AUTH=auth, SSL=ssl, TEST_SUITES=test_suite) + display_name = get_display_name("Test", host, python=python, **expansions) + variant = create_variant( + [f".{topology}"], + display_name, + python=python, + host=host, + expansions=expansions, + ) + variants.append(variant) + + return variants + + ################## # Generate Config ################## -project = EvgProject(tasks=None, buildvariants=create_ocsp_variants()) -print(ShrubService.generate_yaml(project)) # noqa: T201 +generate_yaml(variants=create_server_variants()) From 3855effbd844a4c48ca2d13f651ce6dd908b14a3 Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Tue, 15 Oct 2024 15:16:42 -0400 Subject: [PATCH 037/182] PYTHON-4842 - Convert test.test_create_entities to async (#1919) --- test/asynchronous/test_create_entities.py | 128 ++++++++++++++++++++++ test/test_create_entities.py | 2 + tools/synchro.py | 1 + 3 files changed, 131 insertions(+) create mode 100644 test/asynchronous/test_create_entities.py diff --git a/test/asynchronous/test_create_entities.py b/test/asynchronous/test_create_entities.py new file mode 100644 index 0000000000..cb2ec63f4c --- /dev/null +++ b/test/asynchronous/test_create_entities.py @@ -0,0 +1,128 @@ +# Copyright 2021-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from __future__ import annotations + +import sys +import unittest + +sys.path[0:0] = [""] + +from test.asynchronous import AsyncIntegrationTest +from test.asynchronous.unified_format import UnifiedSpecTestMixinV1 + +_IS_SYNC = False + + +class TestCreateEntities(AsyncIntegrationTest): + async def test_store_events_as_entities(self): + self.scenario_runner = UnifiedSpecTestMixinV1() + spec = { + "description": "blank", + "schemaVersion": "1.2", + "createEntities": [ + { + "client": { + "id": "client0", + "storeEventsAsEntities": [ + { + "id": "events1", + "events": [ + "PoolCreatedEvent", + ], + } + ], + } + }, + ], + "tests": [{"description": "foo", "operations": []}], + } + self.scenario_runner.TEST_SPEC = spec + await self.scenario_runner.asyncSetUp() + await self.scenario_runner.run_scenario(spec["tests"][0]) + await self.scenario_runner.entity_map["client0"].close() + final_entity_map = self.scenario_runner.entity_map + self.assertIn("events1", final_entity_map) + self.assertGreater(len(final_entity_map["events1"]), 0) + for event in final_entity_map["events1"]: + self.assertIn("PoolCreatedEvent", event["name"]) + + async def test_store_all_others_as_entities(self): + self.scenario_runner = UnifiedSpecTestMixinV1() + spec = { + "description": "Find", + "schemaVersion": "1.2", + "createEntities": [ + { + "client": { + "id": "client0", + "uriOptions": {"retryReads": True}, + } + }, + {"database": {"id": "database0", "client": "client0", "databaseName": "dat"}}, + { + "collection": { + "id": "collection0", + "database": "database0", + "collectionName": "dat", + } + }, + ], + "tests": [ + { + "description": "test loops", + "operations": [ + { + "name": "loop", + "object": "testRunner", + "arguments": { + "storeIterationsAsEntity": "iterations", + "storeSuccessesAsEntity": "successes", + "storeFailuresAsEntity": "failures", + "storeErrorsAsEntity": "errors", + "numIterations": 5, + "operations": [ + { + "name": "insertOne", + "object": "collection0", + "arguments": {"document": {"_id": 1, "x": 44}}, + }, + { + "name": "insertOne", + "object": "collection0", + "arguments": {"document": {"_id": 2, "x": 44}}, + }, + ], + }, + } + ], + } + ], + } + + await self.client.dat.dat.delete_many({}) + self.scenario_runner.TEST_SPEC = spec + await self.scenario_runner.asyncSetUp() + await self.scenario_runner.run_scenario(spec["tests"][0]) + await self.scenario_runner.entity_map["client0"].close() + entity_map = self.scenario_runner.entity_map + self.assertEqual(len(entity_map["errors"]), 4) + for error in entity_map["errors"]: + self.assertEqual(error["type"], "DuplicateKeyError") + self.assertEqual(entity_map["failures"], []) + self.assertEqual(entity_map["successes"], 2) + self.assertEqual(entity_map["iterations"], 5) + + +if __name__ == "__main__": + unittest.main() diff --git a/test/test_create_entities.py b/test/test_create_entities.py index b7965d4a1d..ad75fe5702 100644 --- a/test/test_create_entities.py +++ b/test/test_create_entities.py @@ -21,6 +21,8 @@ from test import IntegrationTest from test.unified_format import UnifiedSpecTestMixinV1 +_IS_SYNC = True + class TestCreateEntities(IntegrationTest): def test_store_events_as_entities(self): diff --git a/tools/synchro.py b/tools/synchro.py index 25f506ed5a..2123a66616 100644 --- a/tools/synchro.py +++ b/tools/synchro.py @@ -199,6 +199,7 @@ def async_only_test(f: str) -> bool: "test_common.py", "test_connection_logging.py", "test_connections_survive_primary_stepdown_spec.py", + "test_create_entities.py", "test_crud_unified.py", 
"test_cursor.py", "test_database.py", From fa263dc87dfe13f1c2de14ab86c67871ed3b24fb Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Tue, 15 Oct 2024 15:48:05 -0400 Subject: [PATCH 038/182] PYTHON-4847 - Convert test.test_collection_management.py to async (#1916) --- .../test_collection_management.py | 41 +++++++++++++++++++ test/asynchronous/unified_format.py | 2 +- test/test_collection_management.py | 12 +++++- test/unified_format.py | 2 +- tools/synchro.py | 1 + 5 files changed, 54 insertions(+), 4 deletions(-) create mode 100644 test/asynchronous/test_collection_management.py diff --git a/test/asynchronous/test_collection_management.py b/test/asynchronous/test_collection_management.py new file mode 100644 index 0000000000..c0edf91581 --- /dev/null +++ b/test/asynchronous/test_collection_management.py @@ -0,0 +1,41 @@ +# Copyright 2021-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Test the collection management unified spec tests.""" +from __future__ import annotations + +import os +import pathlib +import sys + +sys.path[0:0] = [""] + +from test import unittest +from test.asynchronous.unified_format import generate_test_classes + +_IS_SYNC = False + +# Location of JSON test specifications. +if _IS_SYNC: + _TEST_PATH = os.path.join(pathlib.Path(__file__).resolve().parent, "collection_management") +else: + _TEST_PATH = os.path.join( + pathlib.Path(__file__).resolve().parent.parent, "collection_management" + ) + +# Generate unified tests. +globals().update(generate_test_classes(_TEST_PATH, module=__name__)) + +if __name__ == "__main__": + unittest.main() diff --git a/test/asynchronous/unified_format.py b/test/asynchronous/unified_format.py index 42bda59cb2..8f32ac4a2e 100644 --- a/test/asynchronous/unified_format.py +++ b/test/asynchronous/unified_format.py @@ -773,7 +773,7 @@ async def _databaseOperation_listCollections(self, target, *args, **kwargs): if "batch_size" in kwargs: kwargs["cursor"] = {"batchSize": kwargs.pop("batch_size")} cursor = await target.list_collections(*args, **kwargs) - return list(cursor) + return await cursor.to_list() async def _databaseOperation_createCollection(self, target, *args, **kwargs): # PYTHON-1936 Ignore the listCollections event from create_collection. diff --git a/test/test_collection_management.py b/test/test_collection_management.py index 0eacde1302..063c20df8f 100644 --- a/test/test_collection_management.py +++ b/test/test_collection_management.py @@ -16,6 +16,7 @@ from __future__ import annotations import os +import pathlib import sys sys.path[0:0] = [""] @@ -23,11 +24,18 @@ from test import unittest from test.unified_format import generate_test_classes +_IS_SYNC = True + # Location of JSON test specifications. 
-TEST_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), "collection_management") +if _IS_SYNC: + _TEST_PATH = os.path.join(pathlib.Path(__file__).resolve().parent, "collection_management") +else: + _TEST_PATH = os.path.join( + pathlib.Path(__file__).resolve().parent.parent, "collection_management" + ) # Generate unified tests. -globals().update(generate_test_classes(TEST_PATH, module=__name__)) +globals().update(generate_test_classes(_TEST_PATH, module=__name__)) if __name__ == "__main__": unittest.main() diff --git a/test/unified_format.py b/test/unified_format.py index 13ab0af69b..be7fc1f8ad 100644 --- a/test/unified_format.py +++ b/test/unified_format.py @@ -769,7 +769,7 @@ def _databaseOperation_listCollections(self, target, *args, **kwargs): if "batch_size" in kwargs: kwargs["cursor"] = {"batchSize": kwargs.pop("batch_size")} cursor = target.list_collections(*args, **kwargs) - return list(cursor) + return cursor.to_list() def _databaseOperation_createCollection(self, target, *args, **kwargs): # PYTHON-1936 Ignore the listCollections event from create_collection. diff --git a/tools/synchro.py b/tools/synchro.py index 2123a66616..0a7109c6d4 100644 --- a/tools/synchro.py +++ b/tools/synchro.py @@ -192,6 +192,7 @@ def async_only_test(f: str) -> bool: "test_client_context.py", "test_collation.py", "test_collection.py", + "test_collection_management.py", "test_command_logging.py", "test_command_logging.py", "test_command_monitoring.py", From 8034baec90043c1d3cf4dc17a5481a559743c524 Mon Sep 17 00:00:00 2001 From: "Jeffrey A. Clark" Date: Tue, 15 Oct 2024 18:45:49 -0400 Subject: [PATCH 039/182] PYTHON-4834 Add __repr__ to IndexModel, SearchIndexModel (#1909) --- doc/changelog.rst | 2 ++ pymongo/operations.py | 13 +++++++ test/test_operations.py | 80 +++++++++++++++++++++++++++++++++++++++++ 3 files changed, 95 insertions(+) create mode 100644 test/test_operations.py diff --git a/doc/changelog.rst b/doc/changelog.rst index 44d6fc9a57..3935fa3492 100644 --- a/doc/changelog.rst +++ b/doc/changelog.rst @@ -17,6 +17,8 @@ PyMongo 4.11 brings a number of changes including: - :attr:`~pymongo.asynchronous.mongo_client.AsyncMongoClient.address` and :attr:`~pymongo.mongo_client.MongoClient.address` now correctly block when called on unconnected clients until either connection succeeds or a server selection timeout error is raised. +- Added :func:`repr` support to :class:`pymongo.operations.IndexModel`. +- Added :func:`repr` support to :class:`pymongo.operations.SearchIndexModel`. Issues Resolved ............... 
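The reprs added below are designed to round-trip through ``eval`` and to mirror the constructor arguments. A minimal sketch of what they produce once this change is applied; the index keys and the search index definition are illustrative, and the expected output in the comments follows the repr format and the test expectations introduced in this patch:

    from pymongo.operations import IndexModel, SearchIndexModel

    # IndexModel.__repr__ prints the key document first, then every other
    # field of the underlying index document (here only the generated name).
    idx = IndexModel({"hello": 1, "world": -1})
    print(repr(idx))
    # IndexModel({'hello': 1, 'world': -1}, name='hello_1_world_-1')

    # SearchIndexModel.__repr__ prints every field of its document.
    sidx = SearchIndexModel({"mappings": {"dynamic": False}}, name="my_search_index")
    print(repr(sidx))
    # e.g. SearchIndexModel(definition={'mappings': {'dynamic': False}}, name='my_search_index')
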
diff --git a/pymongo/operations.py b/pymongo/operations.py index d2e1feba69..384ffc94be 100644 --- a/pymongo/operations.py +++ b/pymongo/operations.py @@ -773,6 +773,13 @@ def document(self) -> dict[str, Any]: """ return self.__document + def __repr__(self) -> str: + return "{}({}{})".format( + self.__class__.__name__, + self.document["key"], + "".join([f", {key}={value!r}" for key, value in self.document.items() if key != "key"]), + ) + class SearchIndexModel: """Represents a search index to create.""" @@ -812,3 +819,9 @@ def __init__( def document(self) -> Mapping[str, Any]: """The document for this index.""" return self.__document + + def __repr__(self) -> str: + return "{}({})".format( + self.__class__.__name__, + ", ".join([f"{key}={value!r}" for key, value in self.document.items()]), + ) diff --git a/test/test_operations.py b/test/test_operations.py new file mode 100644 index 0000000000..3ee6677735 --- /dev/null +++ b/test/test_operations.py @@ -0,0 +1,80 @@ +# Copyright 2024-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Test the operations module.""" +from __future__ import annotations + +from test import UnitTest, unittest + +from pymongo import ASCENDING, DESCENDING +from pymongo.collation import Collation +from pymongo.errors import OperationFailure +from pymongo.operations import IndexModel, SearchIndexModel + + +class TestOperationsBase(UnitTest): + """Base class for testing operations module.""" + + def assertRepr(self, obj): + new_obj = eval(repr(obj)) + self.assertEqual(type(new_obj), type(obj)) + self.assertEqual(repr(new_obj), repr(obj)) + + +class TestIndexModel(TestOperationsBase): + """Test IndexModel features.""" + + def test_repr(self): + # Based on examples in test_collection.py + self.assertRepr(IndexModel("hello")) + self.assertRepr(IndexModel([("hello", DESCENDING), ("world", ASCENDING)])) + self.assertRepr( + IndexModel([("hello", DESCENDING), ("world", ASCENDING)], name="hello_world") + ) + # Test all the kwargs + self.assertRepr(IndexModel("name", name="name")) + self.assertRepr(IndexModel("unique", unique=False)) + self.assertRepr(IndexModel("background", background=True)) + self.assertRepr(IndexModel("sparse", sparse=True)) + self.assertRepr(IndexModel("bucketSize", bucketSize=1)) + self.assertRepr(IndexModel("min", min=1)) + self.assertRepr(IndexModel("max", max=1)) + self.assertRepr(IndexModel("expireAfterSeconds", expireAfterSeconds=1)) + self.assertRepr( + IndexModel("partialFilterExpression", partialFilterExpression={"hello": "world"}) + ) + self.assertRepr(IndexModel("collation", collation=Collation(locale="en_US"))) + self.assertRepr(IndexModel("wildcardProjection", wildcardProjection={"$**": 1})) + self.assertRepr(IndexModel("hidden", hidden=False)) + # Test string literal + self.assertEqual(repr(IndexModel("hello")), "IndexModel({'hello': 1}, name='hello_1')") + self.assertEqual( + repr(IndexModel({"hello": 1, "world": -1})), + "IndexModel({'hello': 1, 'world': -1}, name='hello_1_world_-1')", + ) + + +class 
TestSearchIndexModel(TestOperationsBase): + """Test SearchIndexModel features.""" + + def test_repr(self): + self.assertRepr(SearchIndexModel({"hello": "hello"}, key=1)) + self.assertEqual( + repr(SearchIndexModel({"hello": "hello"}, key=1)), + "SearchIndexModel(definition={'hello': 'hello'}, key=1)", + ) + + +if __name__ == "__main__": + unittest.main() From 463518bf8136264fbed34e3c3ddbc0a34d109156 Mon Sep 17 00:00:00 2001 From: "Jeffrey A. Clark" Date: Wed, 16 Oct 2024 11:02:57 -0400 Subject: [PATCH 040/182] PYTHON-4765 Resync server-selection spec (#1935) --- .../operation-id.json | 4 +- .../server_selection_logging/replica-set.json | 2 +- test/server_selection_logging/sharded.json | 2 +- test/server_selection_logging/standalone.json | 930 +----------------- 4 files changed, 6 insertions(+), 932 deletions(-) diff --git a/test/server_selection_logging/operation-id.json b/test/server_selection_logging/operation-id.json index ccc2623166..72ebff60d8 100644 --- a/test/server_selection_logging/operation-id.json +++ b/test/server_selection_logging/operation-id.json @@ -197,7 +197,7 @@ } }, { - "level": "debug", + "level": "info", "component": "serverSelection", "data": { "message": "Waiting for suitable server to become available", @@ -383,7 +383,7 @@ } }, { - "level": "debug", + "level": "info", "component": "serverSelection", "data": { "message": "Waiting for suitable server to become available", diff --git a/test/server_selection_logging/replica-set.json b/test/server_selection_logging/replica-set.json index 830b1ea51a..5eba784bf2 100644 --- a/test/server_selection_logging/replica-set.json +++ b/test/server_selection_logging/replica-set.json @@ -184,7 +184,7 @@ } }, { - "level": "debug", + "level": "info", "component": "serverSelection", "data": { "message": "Waiting for suitable server to become available", diff --git a/test/server_selection_logging/sharded.json b/test/server_selection_logging/sharded.json index 346c050f9e..d42fba9100 100644 --- a/test/server_selection_logging/sharded.json +++ b/test/server_selection_logging/sharded.json @@ -193,7 +193,7 @@ } }, { - "level": "debug", + "level": "info", "component": "serverSelection", "data": { "message": "Waiting for suitable server to become available", diff --git a/test/server_selection_logging/standalone.json b/test/server_selection_logging/standalone.json index 3152d0bbf3..3b3eddd841 100644 --- a/test/server_selection_logging/standalone.json +++ b/test/server_selection_logging/standalone.json @@ -47,29 +47,9 @@ } } ], - "initialData": [ - { - "collectionName": "server-selection", - "databaseName": "logging-tests", - "documents": [ - { - "_id": 1, - "x": 11 - }, - { - "_id": 2, - "x": 22 - }, - { - "_id": 3, - "x": 33 - } - ] - } - ], "tests": [ { - "description": "A successful insert operation", + "description": "A successful operation", "operations": [ { "name": "waitForEvent", @@ -211,7 +191,7 @@ } }, { - "level": "debug", + "level": "info", "component": "serverSelection", "data": { "message": "Waiting for suitable server to become available", @@ -250,912 +230,6 @@ ] } ] - }, - { - "description": "A successful find operation", - "operations": [ - { - "name": "waitForEvent", - "object": "testRunner", - "arguments": { - "client": "client", - "event": { - "topologyDescriptionChangedEvent": {} - }, - "count": 2 - } - }, - { - "name": "findOne", - "object": "collection", - "arguments": { - "filter": { - "x": 1 - } - } - } - - ], - "expectLogMessages": [ - { - "client": "client", - "messages": [ - { - "level": "debug", - "component": 
"serverSelection", - "data": { - "message": "Server selection started", - "selector": { - "$$exists": true - }, - "operation": "find", - "topologyDescription": { - "$$exists": true - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection succeeded", - "selector": { - "$$exists": true - }, - "operation": "find", - "topologyDescription": { - "$$exists": true - }, - "serverHost": { - "$$type": "string" - }, - "serverPort": { - "$$type": [ - "int", - "long" - ] - } - } - } - ] - } - ] - }, - { - "description": "A successful findAndModify operation", - "operations": [ - { - "name": "waitForEvent", - "object": "testRunner", - "arguments": { - "client": "client", - "event": { - "topologyDescriptionChangedEvent": {} - }, - "count": 2 - } - }, - { - "name": "findOneAndReplace", - "object": "collection", - "arguments": { - "filter": { - "x": 1 - }, - "replacement": { - "x": 11 - } - } - } - ], - "expectLogMessages": [ - { - "client": "client", - "messages": [ - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection started", - "selector": { - "$$exists": true - }, - "operation": "findAndModify", - "topologyDescription": { - "$$exists": true - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection succeeded", - "selector": { - "$$exists": true - }, - "operation": "findAndModify", - "topologyDescription": { - "$$exists": true - }, - "serverHost": { - "$$type": "string" - }, - "serverPort": { - "$$type": [ - "int", - "long" - ] - } - } - } - ] - } - ] - }, - { - "description": "A successful find and getMore operation", - "operations": [ - { - "name": "waitForEvent", - "object": "testRunner", - "arguments": { - "client": "client", - "event": { - "topologyDescriptionChangedEvent": {} - }, - "count": 2 - } - }, - { - "name": "find", - "object": "collection", - "arguments": { - "batchSize": 3 - } - } - ], - "expectLogMessages": [ - { - "client": "client", - "messages": [ - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection started", - "selector": { - "$$exists": true - }, - "operation": "find", - "topologyDescription": { - "$$exists": true - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection succeeded", - "selector": { - "$$exists": true - }, - "operation": "find", - "topologyDescription": { - "$$exists": true - }, - "serverHost": { - "$$type": "string" - }, - "serverPort": { - "$$type": [ - "int", - "long" - ] - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection started", - "selector": { - "$$exists": true - }, - "operation": "getMore", - "topologyDescription": { - "$$exists": true - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection succeeded", - "selector": { - "$$exists": true - }, - "operation": "getMore", - "topologyDescription": { - "$$exists": true - }, - "serverHost": { - "$$type": "string" - }, - "serverPort": { - "$$type": [ - "int", - "long" - ] - } - } - } - ] - } - ] - }, - { - "description": "A successful aggregate operation", - "operations": [ - { - "name": "waitForEvent", - "object": "testRunner", - "arguments": { - "client": "client", - "event": { - "topologyDescriptionChangedEvent": {} - }, - "count": 2 - } - }, - { - "name": "aggregate", - "object": "collection", - "arguments": { - "pipeline": [ - { - 
"$match": { - "_id": { - "$gt": 1 - } - } - } - ] - } - } - ], - "expectLogMessages": [ - { - "client": "client", - "messages": [ - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection started", - "selector": { - "$$exists": true - }, - "operation": "aggregate", - "topologyDescription": { - "$$exists": true - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection succeeded", - "selector": { - "$$exists": true - }, - "operation": "aggregate", - "topologyDescription": { - "$$exists": true - }, - "serverHost": { - "$$type": "string" - }, - "serverPort": { - "$$type": [ - "int", - "long" - ] - } - } - } - ] - } - ] - }, - { - "description": "A successful count operation", - "operations": [ - { - "name": "waitForEvent", - "object": "testRunner", - "arguments": { - "client": "client", - "event": { - "topologyDescriptionChangedEvent": {} - }, - "count": 2 - } - }, - { - "name": "countDocuments", - "object": "collection", - "arguments": { - "filter": {} - } - } - ], - "expectLogMessages": [ - { - "client": "client", - "messages": [ - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection started", - "selector": { - "$$exists": true - }, - "operation": "count", - "topologyDescription": { - "$$exists": true - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection succeeded", - "selector": { - "$$exists": true - }, - "operation": "count", - "topologyDescription": { - "$$exists": true - }, - "serverHost": { - "$$type": "string" - }, - "serverPort": { - "$$type": [ - "int", - "long" - ] - } - } - } - ] - } - ] - }, - { - "description": "A successful distinct operation", - "operations": [ - { - "name": "waitForEvent", - "object": "testRunner", - "arguments": { - "client": "client", - "event": { - "topologyDescriptionChangedEvent": {} - }, - "count": 2 - } - }, - { - "name": "distinct", - "object": "collection", - "arguments": { - "fieldName": "x", - "filter": {} - } - } - ], - "expectLogMessages": [ - { - "client": "client", - "messages": [ - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection started", - "selector": { - "$$exists": true - }, - "operation": "distinct", - "topologyDescription": { - "$$exists": true - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection succeeded", - "selector": { - "$$exists": true - }, - "operation": "distinct", - "topologyDescription": { - "$$exists": true - }, - "serverHost": { - "$$type": "string" - }, - "serverPort": { - "$$type": [ - "int", - "long" - ] - } - } - } - ] - } - ] - }, - { - "description": "Successful collection management operations", - "operations": [ - { - "name": "waitForEvent", - "object": "testRunner", - "arguments": { - "client": "client", - "event": { - "topologyDescriptionChangedEvent": {} - }, - "count": 2 - } - }, - { - "name": "createCollection", - "object": "database", - "arguments": { - "collection": "foo" - } - }, - { - "name": "listCollections", - "object": "database" - }, - { - "name": "dropCollection", - "object": "database", - "arguments": { - "collection": "foo" - } - } - ], - "expectLogMessages": [ - { - "client": "client", - "messages": [ - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection started", - "selector": { - "$$exists": true - }, - "operation": "create", - 
"topologyDescription": { - "$$exists": true - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection succeeded", - "selector": { - "$$exists": true - }, - "operation": "create", - "topologyDescription": { - "$$exists": true - }, - "serverHost": { - "$$type": "string" - }, - "serverPort": { - "$$type": [ - "int", - "long" - ] - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection started", - "selector": { - "$$exists": true - }, - "operation": "listCollections", - "topologyDescription": { - "$$exists": true - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection succeeded", - "selector": { - "$$exists": true - }, - "operation": "listCollections", - "topologyDescription": { - "$$exists": true - }, - "serverHost": { - "$$type": "string" - }, - "serverPort": { - "$$type": [ - "int", - "long" - ] - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection started", - "selector": { - "$$exists": true - }, - "operation": "drop", - "topologyDescription": { - "$$exists": true - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection succeeded", - "selector": { - "$$exists": true - }, - "operation": "drop", - "topologyDescription": { - "$$exists": true - }, - "serverHost": { - "$$type": "string" - }, - "serverPort": { - "$$type": [ - "int", - "long" - ] - } - } - } - ] - } - ] - }, - { - "description": "Successful index operations", - "operations": [ - { - "name": "waitForEvent", - "object": "testRunner", - "arguments": { - "client": "client", - "event": { - "topologyDescriptionChangedEvent": {} - }, - "count": 2 - } - }, - { - "name": "createIndex", - "object": "collection", - "arguments": { - "keys": { - "x": 1 - }, - "name": "x_1" - } - }, - { - "name": "listIndexes", - "object": "collection" - }, - { - "name": "dropIndex", - "object": "collection", - "arguments": { - "name": "x_1" - } - } - ], - "expectLogMessages": [ - { - "client": "client", - "messages": [ - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection started", - "selector": { - "$$exists": true - }, - "operation": "createIndexes", - "topologyDescription": { - "$$exists": true - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection succeeded", - "selector": { - "$$exists": true - }, - "operation": "createIndexes", - "topologyDescription": { - "$$exists": true - }, - "serverHost": { - "$$type": "string" - }, - "serverPort": { - "$$type": [ - "int", - "long" - ] - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection started", - "selector": { - "$$exists": true - }, - "operation": "listIndexes", - "topologyDescription": { - "$$exists": true - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection succeeded", - "selector": { - "$$exists": true - }, - "operation": "listIndexes", - "topologyDescription": { - "$$exists": true - }, - "serverHost": { - "$$type": "string" - }, - "serverPort": { - "$$type": [ - "int", - "long" - ] - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection started", - "selector": { - "$$exists": true - }, - "operation": "dropIndexes", - "topologyDescription": { - 
"$$exists": true - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection succeeded", - "selector": { - "$$exists": true - }, - "operation": "dropIndexes", - "topologyDescription": { - "$$exists": true - }, - "serverHost": { - "$$type": "string" - }, - "serverPort": { - "$$type": [ - "int", - "long" - ] - } - } - } - ] - } - ] - }, - { - "description": "A successful update operation", - "operations": [ - { - "name": "waitForEvent", - "object": "testRunner", - "arguments": { - "client": "client", - "event": { - "topologyDescriptionChangedEvent": {} - }, - "count": 2 - } - }, - { - "name": "updateOne", - "object": "collection", - "arguments": { - "filter": { - "x": 1 - }, - "update": { - "$inc": { - "x": 1 - } - } - } - } - ], - "expectLogMessages": [ - { - "client": "client", - "messages": [ - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection started", - "selector": { - "$$exists": true - }, - "operation": "update", - "topologyDescription": { - "$$exists": true - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection succeeded", - "selector": { - "$$exists": true - }, - "operation": "update", - "topologyDescription": { - "$$exists": true - }, - "serverHost": { - "$$type": "string" - }, - "serverPort": { - "$$type": [ - "int", - "long" - ] - } - } - } - ] - } - ] - }, - { - "description": "A successful delete operation", - "operations": [ - { - "name": "waitForEvent", - "object": "testRunner", - "arguments": { - "client": "client", - "event": { - "topologyDescriptionChangedEvent": {} - }, - "count": 2 - } - }, - { - "name": "deleteOne", - "object": "collection", - "arguments": { - "filter": { - "x": 1 - } - } - } - ], - "expectLogMessages": [ - { - "client": "client", - "messages": [ - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection started", - "selector": { - "$$exists": true - }, - "operation": "delete", - "topologyDescription": { - "$$exists": true - } - } - }, - { - "level": "debug", - "component": "serverSelection", - "data": { - "message": "Server selection succeeded", - "selector": { - "$$exists": true - }, - "operation": "delete", - "topologyDescription": { - "$$exists": true - }, - "serverHost": { - "$$type": "string" - }, - "serverPort": { - "$$type": [ - "int", - "long" - ] - } - } - } - ] - } - ] } ] } From d1375d4178822c376ce3beb0f5987dd7894a03aa Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Wed, 16 Oct 2024 13:41:35 -0500 Subject: [PATCH 041/182] PYTHON-4865 Skip test_write_concern_failure tests temporarily (#1936) --- test/asynchronous/test_bulk.py | 6 ++++++ test/test_bulk.py | 6 ++++++ 2 files changed, 12 insertions(+) diff --git a/test/asynchronous/test_bulk.py b/test/asynchronous/test_bulk.py index 42a3311072..c9ff167b43 100644 --- a/test/asynchronous/test_bulk.py +++ b/test/asynchronous/test_bulk.py @@ -971,6 +971,9 @@ async def cause_wtimeout(self, requests, ordered): @async_client_context.require_replica_set @async_client_context.require_secondaries_count(1) async def test_write_concern_failure_ordered(self): + self.skipTest("Skipping until PYTHON-4865 is resolved.") + details = None + # Ensure we don't raise on wnote. 
coll_ww = self.coll.with_options(write_concern=WriteConcern(w=self.w)) result = await coll_ww.bulk_write([DeleteOne({"something": "that does no exist"})]) @@ -1051,6 +1054,9 @@ async def test_write_concern_failure_ordered(self): @async_client_context.require_replica_set @async_client_context.require_secondaries_count(1) async def test_write_concern_failure_unordered(self): + self.skipTest("Skipping until PYTHON-4865 is resolved.") + details = None + # Ensure we don't raise on wnote. coll_ww = self.coll.with_options(write_concern=WriteConcern(w=self.w)) result = await coll_ww.bulk_write( diff --git a/test/test_bulk.py b/test/test_bulk.py index 64fd48e8cd..ea2b803804 100644 --- a/test/test_bulk.py +++ b/test/test_bulk.py @@ -969,6 +969,9 @@ def cause_wtimeout(self, requests, ordered): @client_context.require_replica_set @client_context.require_secondaries_count(1) def test_write_concern_failure_ordered(self): + self.skipTest("Skipping until PYTHON-4865 is resolved.") + details = None + # Ensure we don't raise on wnote. coll_ww = self.coll.with_options(write_concern=WriteConcern(w=self.w)) result = coll_ww.bulk_write([DeleteOne({"something": "that does no exist"})]) @@ -1049,6 +1052,9 @@ def test_write_concern_failure_ordered(self): @client_context.require_replica_set @client_context.require_secondaries_count(1) def test_write_concern_failure_unordered(self): + self.skipTest("Skipping until PYTHON-4865 is resolved.") + details = None + # Ensure we don't raise on wnote. coll_ww = self.coll.with_options(write_concern=WriteConcern(w=self.w)) result = coll_ww.bulk_write([DeleteOne({"something": "that does no exist"})], ordered=False) From 29064f5b1d85cbea872a6c37023e3d5fa25b9a3d Mon Sep 17 00:00:00 2001 From: Shane Harvey Date: Wed, 16 Oct 2024 12:15:48 -0700 Subject: [PATCH 042/182] PYTHON-4873 Remove bson-stdint-win32.h from THIRD-PARTY-NOTICES (#1937) --- THIRD-PARTY-NOTICES | 33 --------------------------------- 1 file changed, 33 deletions(-) diff --git a/THIRD-PARTY-NOTICES b/THIRD-PARTY-NOTICES index 0b9fc738ed..55b8ff7078 100644 --- a/THIRD-PARTY-NOTICES +++ b/THIRD-PARTY-NOTICES @@ -38,36 +38,3 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -2) License Notice for bson-stdint-win32.h ------------------------------------------ - -ISO C9x compliant stdint.h for Microsoft Visual Studio -Based on ISO/IEC 9899:TC2 Committee draft (May 6, 2005) WG14/N1124 - - Copyright (c) 2006-2013 Alexander Chemeris - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - - 1. Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimer. - - 2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - - 3. Neither the name of the product nor the names of its contributors may - be used to endorse or promote products derived from this software - without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED -WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO -EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, -PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; -OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, -WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR -OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF -ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. From 6f4258c1cdb95f6fe624a66760a66423048b6884 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Wed, 16 Oct 2024 16:41:14 -0500 Subject: [PATCH 043/182] PYTHON-4576 Allow update to supply sort option (#1881) --- doc/changelog.rst | 4 + pymongo/asynchronous/bulk.py | 13 + pymongo/asynchronous/client_bulk.py | 9 + pymongo/asynchronous/collection.py | 25 + pymongo/operations.py | 44 +- pymongo/synchronous/bulk.py | 13 + pymongo/synchronous/client_bulk.py | 9 + pymongo/synchronous/collection.py | 25 + .../aggregate-write-readPreference.json | 69 --- .../unified/bulkWrite-replaceOne-sort.json | 239 ++++++++ .../unified/bulkWrite-updateOne-sort.json | 255 +++++++++ .../client-bulkWrite-partialResults.json | 540 ++++++++++++++++++ .../client-bulkWrite-replaceOne-sort.json | 162 ++++++ .../client-bulkWrite-updateOne-sort.json | 166 ++++++ .../db-aggregate-write-readPreference.json | 51 -- test/crud/unified/replaceOne-sort.json | 232 ++++++++ test/crud/unified/updateOne-sort.json | 240 ++++++++ test/utils.py | 4 - 18 files changed, 1967 insertions(+), 133 deletions(-) create mode 100644 test/crud/unified/bulkWrite-replaceOne-sort.json create mode 100644 test/crud/unified/bulkWrite-updateOne-sort.json create mode 100644 test/crud/unified/client-bulkWrite-partialResults.json create mode 100644 test/crud/unified/client-bulkWrite-replaceOne-sort.json create mode 100644 test/crud/unified/client-bulkWrite-updateOne-sort.json create mode 100644 test/crud/unified/replaceOne-sort.json create mode 100644 test/crud/unified/updateOne-sort.json diff --git a/doc/changelog.rst b/doc/changelog.rst index 3935fa3492..4c1955d19d 100644 --- a/doc/changelog.rst +++ b/doc/changelog.rst @@ -19,6 +19,10 @@ PyMongo 4.11 brings a number of changes including: until either connection succeeds or a server selection timeout error is raised. - Added :func:`repr` support to :class:`pymongo.operations.IndexModel`. - Added :func:`repr` support to :class:`pymongo.operations.SearchIndexModel`. +- Added ``sort`` parameter to + :meth:`~pymongo.collection.Collection.update_one`, :meth:`~pymongo.collection.Collection.replace_one`, + :class:`~pymongo.operations.UpdateOne`, and + :class:`~pymongo.operations.UpdateMany`, Issues Resolved ............... 
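As the changelog entry above notes, ``sort`` lets a single-document update or replace choose which matching document is modified when the filter matches more than one; per the docstrings added below, the server must be MongoDB 8.0 or newer. A minimal sketch of the new parameter in use, assuming a MongoDB 8.0+ deployment reachable at localhost and a throwaway ``test.scores`` collection (both placeholders for illustration only):

    from pymongo import MongoClient, ReplaceOne, UpdateOne

    client = MongoClient("mongodb://localhost:27017")
    coll = client.test.scores
    coll.insert_many([{"name": "a", "score": 10}, {"name": "b", "score": 30}])

    # With sort={"score": -1}, the single update is applied to the
    # highest-scoring document that matches the filter.
    coll.update_one({"score": {"$gte": 10}}, {"$inc": {"score": 1}}, sort={"score": -1})

    # replace_one and the bulk-write models accept the same option.
    coll.replace_one({"score": {"$gte": 10}}, {"name": "top", "score": 100}, sort={"score": -1})
    coll.bulk_write([
        UpdateOne({"score": {"$gte": 10}}, {"$set": {"flag": True}}, sort={"score": -1}),
        ReplaceOne({"score": {"$gte": 10}}, {"name": "winner", "score": 99}, sort={"score": -1}),
    ])
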
diff --git a/pymongo/asynchronous/bulk.py b/pymongo/asynchronous/bulk.py index 9d33a990ed..e6cfe5b36e 100644 --- a/pymongo/asynchronous/bulk.py +++ b/pymongo/asynchronous/bulk.py @@ -109,6 +109,7 @@ def __init__( self.uses_array_filters = False self.uses_hint_update = False self.uses_hint_delete = False + self.uses_sort = False self.is_retryable = True self.retrying = False self.started_retryable_write = False @@ -144,6 +145,7 @@ def add_update( collation: Optional[Mapping[str, Any]] = None, array_filters: Optional[list[Mapping[str, Any]]] = None, hint: Union[str, dict[str, Any], None] = None, + sort: Optional[Mapping[str, Any]] = None, ) -> None: """Create an update document and add it to the list of ops.""" validate_ok_for_update(update) @@ -159,6 +161,9 @@ def add_update( if hint is not None: self.uses_hint_update = True cmd["hint"] = hint + if sort is not None: + self.uses_sort = True + cmd["sort"] = sort if multi: # A bulk_write containing an update_many is not retryable. self.is_retryable = False @@ -171,6 +176,7 @@ def add_replace( upsert: bool = False, collation: Optional[Mapping[str, Any]] = None, hint: Union[str, dict[str, Any], None] = None, + sort: Optional[Mapping[str, Any]] = None, ) -> None: """Create a replace document and add it to the list of ops.""" validate_ok_for_replace(replacement) @@ -181,6 +187,9 @@ def add_replace( if hint is not None: self.uses_hint_update = True cmd["hint"] = hint + if sort is not None: + self.uses_sort = True + cmd["sort"] = sort self.ops.append((_UPDATE, cmd)) def add_delete( @@ -699,6 +708,10 @@ async def execute_no_results( raise ConfigurationError( "Must be connected to MongoDB 4.2+ to use hint on unacknowledged update commands." ) + if unack and self.uses_sort and conn.max_wire_version < 25: + raise ConfigurationError( + "Must be connected to MongoDB 8.0+ to use sort on unacknowledged update commands." + ) # Cannot have both unacknowledged writes and bypass document validation. if self.bypass_doc_val: raise OperationFailure( diff --git a/pymongo/asynchronous/client_bulk.py b/pymongo/asynchronous/client_bulk.py index dc800c9549..96571c21eb 100644 --- a/pymongo/asynchronous/client_bulk.py +++ b/pymongo/asynchronous/client_bulk.py @@ -118,6 +118,7 @@ def __init__( self.uses_array_filters = False self.uses_hint_update = False self.uses_hint_delete = False + self.uses_sort = False self.is_retryable = self.client.options.retry_writes self.retrying = False @@ -148,6 +149,7 @@ def add_update( collation: Optional[Mapping[str, Any]] = None, array_filters: Optional[list[Mapping[str, Any]]] = None, hint: Union[str, dict[str, Any], None] = None, + sort: Optional[Mapping[str, Any]] = None, ) -> None: """Create an update document and add it to the list of ops.""" validate_ok_for_update(update) @@ -169,6 +171,9 @@ def add_update( if collation is not None: self.uses_collation = True cmd["collation"] = collation + if sort is not None: + self.uses_sort = True + cmd["sort"] = sort if multi: # A bulk_write containing an update_many is not retryable. 
self.is_retryable = False @@ -184,6 +189,7 @@ def add_replace( upsert: Optional[bool] = None, collation: Optional[Mapping[str, Any]] = None, hint: Union[str, dict[str, Any], None] = None, + sort: Optional[Mapping[str, Any]] = None, ) -> None: """Create a replace document and add it to the list of ops.""" validate_ok_for_replace(replacement) @@ -202,6 +208,9 @@ def add_replace( if collation is not None: self.uses_collation = True cmd["collation"] = collation + if sort is not None: + self.uses_sort = True + cmd["sort"] = sort self.ops.append(("replace", cmd)) self.namespaces.append(namespace) self.total_ops += 1 diff --git a/pymongo/asynchronous/collection.py b/pymongo/asynchronous/collection.py index 4ddcbab4d2..9b73423627 100644 --- a/pymongo/asynchronous/collection.py +++ b/pymongo/asynchronous/collection.py @@ -993,6 +993,7 @@ async def _update( session: Optional[AsyncClientSession] = None, retryable_write: bool = False, let: Optional[Mapping[str, Any]] = None, + sort: Optional[Mapping[str, Any]] = None, comment: Optional[Any] = None, ) -> Optional[Mapping[str, Any]]: """Internal update / replace helper.""" @@ -1024,6 +1025,14 @@ async def _update( if not isinstance(hint, str): hint = helpers_shared._index_document(hint) update_doc["hint"] = hint + if sort is not None: + if not acknowledged and conn.max_wire_version < 25: + raise ConfigurationError( + "Must be connected to MongoDB 8.0+ to use sort on unacknowledged update commands." + ) + common.validate_is_mapping("sort", sort) + update_doc["sort"] = sort + command = {"update": self.name, "ordered": ordered, "updates": [update_doc]} if let is not None: common.validate_is_mapping("let", let) @@ -1079,6 +1088,7 @@ async def _update_retryable( hint: Optional[_IndexKeyHint] = None, session: Optional[AsyncClientSession] = None, let: Optional[Mapping[str, Any]] = None, + sort: Optional[Mapping[str, Any]] = None, comment: Optional[Any] = None, ) -> Optional[Mapping[str, Any]]: """Internal update / replace helper.""" @@ -1102,6 +1112,7 @@ async def _update( session=session, retryable_write=retryable_write, let=let, + sort=sort, comment=comment, ) @@ -1122,6 +1133,7 @@ async def replace_one( hint: Optional[_IndexKeyHint] = None, session: Optional[AsyncClientSession] = None, let: Optional[Mapping[str, Any]] = None, + sort: Optional[Mapping[str, Any]] = None, comment: Optional[Any] = None, ) -> UpdateResult: """Replace a single document matching the filter. @@ -1176,8 +1188,13 @@ async def replace_one( aggregate expression context (e.g. "$$var"). :param comment: A user-provided comment to attach to this command. + :param sort: Specify which document the operation updates if the query matches + multiple documents. The first document matched by the sort order will be updated. + This option is only supported on MongoDB 8.0 and above. :return: - An instance of :class:`~pymongo.results.UpdateResult`. + .. versionchanged:: 4.11 + Added ``sort`` parameter. .. versionchanged:: 4.1 Added ``let`` parameter. Added ``comment`` parameter. @@ -1209,6 +1226,7 @@ async def replace_one( hint=hint, session=session, let=let, + sort=sort, comment=comment, ), write_concern.acknowledged, @@ -1225,6 +1243,7 @@ async def update_one( hint: Optional[_IndexKeyHint] = None, session: Optional[AsyncClientSession] = None, let: Optional[Mapping[str, Any]] = None, + sort: Optional[Mapping[str, Any]] = None, comment: Optional[Any] = None, ) -> UpdateResult: """Update a single document matching the filter. 
@@ -1283,11 +1302,16 @@ async def update_one( constant or closed expressions that do not reference document fields. Parameters can then be accessed as variables in an aggregate expression context (e.g. "$$var"). + :param sort: Specify which document the operation updates if the query matches + multiple documents. The first document matched by the sort order will be updated. + This option is only supported on MongoDB 8.0 and above. :param comment: A user-provided comment to attach to this command. :return: - An instance of :class:`~pymongo.results.UpdateResult`. + .. versionchanged:: 4.11 + Added ``sort`` parameter. .. versionchanged:: 4.1 Added ``let`` parameter. Added ``comment`` parameter. @@ -1322,6 +1346,7 @@ async def update_one( hint=hint, session=session, let=let, + sort=sort, comment=comment, ), write_concern.acknowledged, diff --git a/pymongo/operations.py b/pymongo/operations.py index 384ffc94be..8905048c4e 100644 --- a/pymongo/operations.py +++ b/pymongo/operations.py @@ -325,6 +325,7 @@ class ReplaceOne(Generic[_DocumentType]): "_collation", "_hint", "_namespace", + "_sort", ) def __init__( @@ -335,6 +336,7 @@ def __init__( collation: Optional[_CollationIn] = None, hint: Optional[_IndexKeyHint] = None, namespace: Optional[str] = None, + sort: Optional[Mapping[str, Any]] = None, ) -> None: """Create a ReplaceOne instance. @@ -353,8 +355,12 @@ def __init__( :meth:`~pymongo.asynchronous.collection.AsyncCollection.create_index` or :meth:`~pymongo.collection.Collection.create_index` (e.g. ``[('field', ASCENDING)]``). This option is only supported on MongoDB 4.2 and above. + :param sort: Specify which document the operation updates if the query matches + multiple documents. The first document matched by the sort order will be updated. :param namespace: (optional) The namespace in which to replace a document. + .. versionchanged:: 4.10 + Added ``sort`` option. .. versionchanged:: 4.9 Added the `namespace` option to support `MongoClient.bulk_write`. .. 
versionchanged:: 3.11 @@ -371,6 +377,7 @@ def __init__( else: self._hint = hint + self._sort = sort self._filter = filter self._doc = replacement self._upsert = upsert @@ -385,6 +392,7 @@ def _add_to_bulk(self, bulkobj: _AgnosticBulk) -> None: self._upsert, collation=validate_collation_or_none(self._collation), hint=self._hint, + sort=self._sort, ) def _add_to_client_bulk(self, bulkobj: _AgnosticClientBulk) -> None: @@ -400,6 +408,7 @@ def _add_to_client_bulk(self, bulkobj: _AgnosticClientBulk) -> None: self._upsert, collation=validate_collation_or_none(self._collation), hint=self._hint, + sort=self._sort, ) def __eq__(self, other: Any) -> bool: @@ -411,13 +420,15 @@ def __eq__(self, other: Any) -> bool: other._collation, other._hint, other._namespace, + other._sort, ) == ( self._filter, self._doc, self._upsert, self._collation, - other._hint, + self._hint, self._namespace, + self._sort, ) return NotImplemented @@ -426,7 +437,7 @@ def __ne__(self, other: Any) -> bool: def __repr__(self) -> str: if self._namespace: - return "{}({!r}, {!r}, {!r}, {!r}, {!r}, {!r})".format( + return "{}({!r}, {!r}, {!r}, {!r}, {!r}, {!r}, {!r})".format( self.__class__.__name__, self._filter, self._doc, @@ -434,14 +445,16 @@ def __repr__(self) -> str: self._collation, self._hint, self._namespace, + self._sort, ) - return "{}({!r}, {!r}, {!r}, {!r}, {!r})".format( + return "{}({!r}, {!r}, {!r}, {!r}, {!r}, {!r})".format( self.__class__.__name__, self._filter, self._doc, self._upsert, self._collation, self._hint, + self._sort, ) @@ -456,6 +469,7 @@ class _UpdateOp: "_array_filters", "_hint", "_namespace", + "_sort", ) def __init__( @@ -467,6 +481,7 @@ def __init__( array_filters: Optional[list[Mapping[str, Any]]], hint: Optional[_IndexKeyHint], namespace: Optional[str], + sort: Optional[Mapping[str, Any]], ): if filter is not None: validate_is_mapping("filter", filter) @@ -478,13 +493,13 @@ def __init__( self._hint: Union[str, dict[str, Any], None] = helpers_shared._index_document(hint) else: self._hint = hint - self._filter = filter self._doc = doc self._upsert = upsert self._collation = collation self._array_filters = array_filters self._namespace = namespace + self._sort = sort def __eq__(self, other: object) -> bool: if isinstance(other, type(self)): @@ -496,6 +511,7 @@ def __eq__(self, other: object) -> bool: other._array_filters, other._hint, other._namespace, + other._sort, ) == ( self._filter, self._doc, @@ -504,6 +520,7 @@ def __eq__(self, other: object) -> bool: self._array_filters, self._hint, self._namespace, + self._sort, ) return NotImplemented @@ -512,7 +529,7 @@ def __ne__(self, other: Any) -> bool: def __repr__(self) -> str: if self._namespace: - return "{}({!r}, {!r}, {!r}, {!r}, {!r}, {!r}, {!r})".format( + return "{}({!r}, {!r}, {!r}, {!r}, {!r}, {!r}, {!r}, {!r})".format( self.__class__.__name__, self._filter, self._doc, @@ -521,8 +538,9 @@ def __repr__(self) -> str: self._array_filters, self._hint, self._namespace, + self._sort, ) - return "{}({!r}, {!r}, {!r}, {!r}, {!r}, {!r})".format( + return "{}({!r}, {!r}, {!r}, {!r}, {!r}, {!r}, {!r})".format( self.__class__.__name__, self._filter, self._doc, @@ -530,6 +548,7 @@ def __repr__(self) -> str: self._collation, self._array_filters, self._hint, + self._sort, ) @@ -547,6 +566,7 @@ def __init__( array_filters: Optional[list[Mapping[str, Any]]] = None, hint: Optional[_IndexKeyHint] = None, namespace: Optional[str] = None, + sort: Optional[Mapping[str, Any]] = None, ) -> None: """Represents an update_one operation. 
@@ -567,8 +587,12 @@ def __init__( :meth:`~pymongo.asynchronous.collection.AsyncCollection.create_index` or :meth:`~pymongo.collection.Collection.create_index` (e.g. ``[('field', ASCENDING)]``). This option is only supported on MongoDB 4.2 and above. - :param namespace: (optional) The namespace in which to update a document. + :param namespace: The namespace in which to update a document. + :param sort: Specify which document the operation updates if the query matches + multiple documents. The first document matched by the sort order will be updated. + .. versionchanged:: 4.10 + Added ``sort`` option. .. versionchanged:: 4.9 Added the `namespace` option to support `MongoClient.bulk_write`. .. versionchanged:: 3.11 @@ -580,7 +604,7 @@ def __init__( .. versionchanged:: 3.5 Added the `collation` option. """ - super().__init__(filter, update, upsert, collation, array_filters, hint, namespace) + super().__init__(filter, update, upsert, collation, array_filters, hint, namespace, sort) def _add_to_bulk(self, bulkobj: _AgnosticBulk) -> None: """Add this operation to the _AsyncBulk/_Bulk instance `bulkobj`.""" @@ -592,6 +616,7 @@ def _add_to_bulk(self, bulkobj: _AgnosticBulk) -> None: collation=validate_collation_or_none(self._collation), array_filters=self._array_filters, hint=self._hint, + sort=self._sort, ) def _add_to_client_bulk(self, bulkobj: _AgnosticClientBulk) -> None: @@ -609,6 +634,7 @@ def _add_to_client_bulk(self, bulkobj: _AgnosticClientBulk) -> None: collation=validate_collation_or_none(self._collation), array_filters=self._array_filters, hint=self._hint, + sort=self._sort, ) @@ -659,7 +685,7 @@ def __init__( .. versionchanged:: 3.5 Added the `collation` option. """ - super().__init__(filter, update, upsert, collation, array_filters, hint, namespace) + super().__init__(filter, update, upsert, collation, array_filters, hint, namespace, None) def _add_to_bulk(self, bulkobj: _AgnosticBulk) -> None: """Add this operation to the _AsyncBulk/_Bulk instance `bulkobj`.""" diff --git a/pymongo/synchronous/bulk.py b/pymongo/synchronous/bulk.py index c658157ea1..7fb29a977f 100644 --- a/pymongo/synchronous/bulk.py +++ b/pymongo/synchronous/bulk.py @@ -109,6 +109,7 @@ def __init__( self.uses_array_filters = False self.uses_hint_update = False self.uses_hint_delete = False + self.uses_sort = False self.is_retryable = True self.retrying = False self.started_retryable_write = False @@ -144,6 +145,7 @@ def add_update( collation: Optional[Mapping[str, Any]] = None, array_filters: Optional[list[Mapping[str, Any]]] = None, hint: Union[str, dict[str, Any], None] = None, + sort: Optional[Mapping[str, Any]] = None, ) -> None: """Create an update document and add it to the list of ops.""" validate_ok_for_update(update) @@ -159,6 +161,9 @@ def add_update( if hint is not None: self.uses_hint_update = True cmd["hint"] = hint + if sort is not None: + self.uses_sort = True + cmd["sort"] = sort if multi: # A bulk_write containing an update_many is not retryable. 
self.is_retryable = False @@ -171,6 +176,7 @@ def add_replace( upsert: bool = False, collation: Optional[Mapping[str, Any]] = None, hint: Union[str, dict[str, Any], None] = None, + sort: Optional[Mapping[str, Any]] = None, ) -> None: """Create a replace document and add it to the list of ops.""" validate_ok_for_replace(replacement) @@ -181,6 +187,9 @@ def add_replace( if hint is not None: self.uses_hint_update = True cmd["hint"] = hint + if sort is not None: + self.uses_sort = True + cmd["sort"] = sort self.ops.append((_UPDATE, cmd)) def add_delete( @@ -697,6 +706,10 @@ def execute_no_results( raise ConfigurationError( "Must be connected to MongoDB 4.2+ to use hint on unacknowledged update commands." ) + if unack and self.uses_sort and conn.max_wire_version < 25: + raise ConfigurationError( + "Must be connected to MongoDB 8.0+ to use sort on unacknowledged update commands." + ) # Cannot have both unacknowledged writes and bypass document validation. if self.bypass_doc_val: raise OperationFailure( diff --git a/pymongo/synchronous/client_bulk.py b/pymongo/synchronous/client_bulk.py index f41f0203f2..2c38b1d76c 100644 --- a/pymongo/synchronous/client_bulk.py +++ b/pymongo/synchronous/client_bulk.py @@ -118,6 +118,7 @@ def __init__( self.uses_array_filters = False self.uses_hint_update = False self.uses_hint_delete = False + self.uses_sort = False self.is_retryable = self.client.options.retry_writes self.retrying = False @@ -148,6 +149,7 @@ def add_update( collation: Optional[Mapping[str, Any]] = None, array_filters: Optional[list[Mapping[str, Any]]] = None, hint: Union[str, dict[str, Any], None] = None, + sort: Optional[Mapping[str, Any]] = None, ) -> None: """Create an update document and add it to the list of ops.""" validate_ok_for_update(update) @@ -169,6 +171,9 @@ def add_update( if collation is not None: self.uses_collation = True cmd["collation"] = collation + if sort is not None: + self.uses_sort = True + cmd["sort"] = sort if multi: # A bulk_write containing an update_many is not retryable. self.is_retryable = False @@ -184,6 +189,7 @@ def add_replace( upsert: Optional[bool] = None, collation: Optional[Mapping[str, Any]] = None, hint: Union[str, dict[str, Any], None] = None, + sort: Optional[Mapping[str, Any]] = None, ) -> None: """Create a replace document and add it to the list of ops.""" validate_ok_for_replace(replacement) @@ -202,6 +208,9 @@ def add_replace( if collation is not None: self.uses_collation = True cmd["collation"] = collation + if sort is not None: + self.uses_sort = True + cmd["sort"] = sort self.ops.append(("replace", cmd)) self.namespaces.append(namespace) self.total_ops += 1 diff --git a/pymongo/synchronous/collection.py b/pymongo/synchronous/collection.py index 6fd2ac82dd..6edfddc9a9 100644 --- a/pymongo/synchronous/collection.py +++ b/pymongo/synchronous/collection.py @@ -992,6 +992,7 @@ def _update( session: Optional[ClientSession] = None, retryable_write: bool = False, let: Optional[Mapping[str, Any]] = None, + sort: Optional[Mapping[str, Any]] = None, comment: Optional[Any] = None, ) -> Optional[Mapping[str, Any]]: """Internal update / replace helper.""" @@ -1023,6 +1024,14 @@ def _update( if not isinstance(hint, str): hint = helpers_shared._index_document(hint) update_doc["hint"] = hint + if sort is not None: + if not acknowledged and conn.max_wire_version < 25: + raise ConfigurationError( + "Must be connected to MongoDB 8.0+ to use sort on unacknowledged update commands." 
+ ) + common.validate_is_mapping("sort", sort) + update_doc["sort"] = sort + command = {"update": self.name, "ordered": ordered, "updates": [update_doc]} if let is not None: common.validate_is_mapping("let", let) @@ -1078,6 +1087,7 @@ def _update_retryable( hint: Optional[_IndexKeyHint] = None, session: Optional[ClientSession] = None, let: Optional[Mapping[str, Any]] = None, + sort: Optional[Mapping[str, Any]] = None, comment: Optional[Any] = None, ) -> Optional[Mapping[str, Any]]: """Internal update / replace helper.""" @@ -1101,6 +1111,7 @@ def _update( session=session, retryable_write=retryable_write, let=let, + sort=sort, comment=comment, ) @@ -1121,6 +1132,7 @@ def replace_one( hint: Optional[_IndexKeyHint] = None, session: Optional[ClientSession] = None, let: Optional[Mapping[str, Any]] = None, + sort: Optional[Mapping[str, Any]] = None, comment: Optional[Any] = None, ) -> UpdateResult: """Replace a single document matching the filter. @@ -1175,8 +1187,13 @@ def replace_one( aggregate expression context (e.g. "$$var"). :param comment: A user-provided comment to attach to this command. + :param sort: Specify which document the operation updates if the query matches + multiple documents. The first document matched by the sort order will be updated. + This option is only supported on MongoDB 8.0 and above. :return: - An instance of :class:`~pymongo.results.UpdateResult`. + .. versionchanged:: 4.11 + Added ``sort`` parameter. .. versionchanged:: 4.1 Added ``let`` parameter. Added ``comment`` parameter. @@ -1208,6 +1225,7 @@ def replace_one( hint=hint, session=session, let=let, + sort=sort, comment=comment, ), write_concern.acknowledged, @@ -1224,6 +1242,7 @@ def update_one( hint: Optional[_IndexKeyHint] = None, session: Optional[ClientSession] = None, let: Optional[Mapping[str, Any]] = None, + sort: Optional[Mapping[str, Any]] = None, comment: Optional[Any] = None, ) -> UpdateResult: """Update a single document matching the filter. @@ -1282,11 +1301,16 @@ def update_one( constant or closed expressions that do not reference document fields. Parameters can then be accessed as variables in an aggregate expression context (e.g. "$$var"). + :param sort: Specify which document the operation updates if the query matches + multiple documents. The first document matched by the sort order will be updated. + This option is only supported on MongoDB 8.0 and above. :param comment: A user-provided comment to attach to this command. :return: - An instance of :class:`~pymongo.results.UpdateResult`. + .. versionchanged:: 4.11 + Added ``sort`` parameter. .. versionchanged:: 4.1 Added ``let`` parameter. Added ``comment`` parameter. 
@@ -1321,6 +1345,7 @@ def update_one( hint=hint, session=session, let=let, + sort=sort, comment=comment, ), write_concern.acknowledged, diff --git a/test/crud/unified/aggregate-write-readPreference.json b/test/crud/unified/aggregate-write-readPreference.json index bc887e83cb..c1fa3b4574 100644 --- a/test/crud/unified/aggregate-write-readPreference.json +++ b/test/crud/unified/aggregate-write-readPreference.json @@ -78,11 +78,6 @@ "x": 33 } ] - }, - { - "collectionName": "coll1", - "databaseName": "db0", - "documents": [] } ], "tests": [ @@ -159,22 +154,6 @@ } ] } - ], - "outcome": [ - { - "collectionName": "coll1", - "databaseName": "db0", - "documents": [ - { - "_id": 2, - "x": 22 - }, - { - "_id": 3, - "x": 33 - } - ] - } ] }, { @@ -250,22 +229,6 @@ } ] } - ], - "outcome": [ - { - "collectionName": "coll1", - "databaseName": "db0", - "documents": [ - { - "_id": 2, - "x": 22 - }, - { - "_id": 3, - "x": 33 - } - ] - } ] }, { @@ -344,22 +307,6 @@ } ] } - ], - "outcome": [ - { - "collectionName": "coll1", - "databaseName": "db0", - "documents": [ - { - "_id": 2, - "x": 22 - }, - { - "_id": 3, - "x": 33 - } - ] - } ] }, { @@ -438,22 +385,6 @@ } ] } - ], - "outcome": [ - { - "collectionName": "coll1", - "databaseName": "db0", - "documents": [ - { - "_id": 2, - "x": 22 - }, - { - "_id": 3, - "x": 33 - } - ] - } ] } ] diff --git a/test/crud/unified/bulkWrite-replaceOne-sort.json b/test/crud/unified/bulkWrite-replaceOne-sort.json new file mode 100644 index 0000000000..c0bd383514 --- /dev/null +++ b/test/crud/unified/bulkWrite-replaceOne-sort.json @@ -0,0 +1,239 @@ +{ + "description": "BulkWrite replaceOne-sort", + "schemaVersion": "1.0", + "createEntities": [ + { + "client": { + "id": "client0", + "observeEvents": [ + "commandStartedEvent", + "commandSucceededEvent" + ] + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "crud-tests" + } + }, + { + "collection": { + "id": "collection0", + "database": "database0", + "collectionName": "coll0" + } + } + ], + "initialData": [ + { + "collectionName": "coll0", + "databaseName": "crud-tests", + "documents": [ + { + "_id": 1, + "x": 11 + }, + { + "_id": 2, + "x": 22 + }, + { + "_id": 3, + "x": 33 + } + ] + } + ], + "tests": [ + { + "description": "BulkWrite replaceOne with sort option", + "runOnRequirements": [ + { + "minServerVersion": "8.0" + } + ], + "operations": [ + { + "object": "collection0", + "name": "bulkWrite", + "arguments": { + "requests": [ + { + "replaceOne": { + "filter": { + "_id": { + "$gt": 1 + } + }, + "sort": { + "_id": -1 + }, + "replacement": { + "x": 1 + } + } + } + ] + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "update": "coll0", + "updates": [ + { + "q": { + "_id": { + "$gt": 1 + } + }, + "u": { + "x": 1 + }, + "sort": { + "_id": -1 + }, + "multi": { + "$$unsetOrMatches": false + }, + "upsert": { + "$$unsetOrMatches": false + } + } + ] + } + } + }, + { + "commandSucceededEvent": { + "reply": { + "ok": 1, + "n": 1 + }, + "commandName": "update" + } + } + ] + } + ], + "outcome": [ + { + "collectionName": "coll0", + "databaseName": "crud-tests", + "documents": [ + { + "_id": 1, + "x": 11 + }, + { + "_id": 2, + "x": 22 + }, + { + "_id": 3, + "x": 1 + } + ] + } + ] + }, + { + "description": "BulkWrite replaceOne with sort option unsupported (server-side error)", + "runOnRequirements": [ + { + "maxServerVersion": "7.99" + } + ], + "operations": [ + { + "object": "collection0", + "name": "bulkWrite", + "arguments": { + 
"requests": [ + { + "replaceOne": { + "filter": { + "_id": { + "$gt": 1 + } + }, + "sort": { + "_id": -1 + }, + "replacement": { + "x": 1 + } + } + } + ] + }, + "expectError": { + "isClientError": false + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "update": "coll0", + "updates": [ + { + "q": { + "_id": { + "$gt": 1 + } + }, + "u": { + "x": 1 + }, + "sort": { + "_id": -1 + }, + "multi": { + "$$unsetOrMatches": false + }, + "upsert": { + "$$unsetOrMatches": false + } + } + ] + } + } + } + ] + } + ], + "outcome": [ + { + "collectionName": "coll0", + "databaseName": "crud-tests", + "documents": [ + { + "_id": 1, + "x": 11 + }, + { + "_id": 2, + "x": 22 + }, + { + "_id": 3, + "x": 33 + } + ] + } + ] + } + ] +} diff --git a/test/crud/unified/bulkWrite-updateOne-sort.json b/test/crud/unified/bulkWrite-updateOne-sort.json new file mode 100644 index 0000000000..f78bd3bf3e --- /dev/null +++ b/test/crud/unified/bulkWrite-updateOne-sort.json @@ -0,0 +1,255 @@ +{ + "description": "BulkWrite updateOne-sort", + "schemaVersion": "1.0", + "createEntities": [ + { + "client": { + "id": "client0", + "observeEvents": [ + "commandStartedEvent", + "commandSucceededEvent" + ] + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "crud-tests" + } + }, + { + "collection": { + "id": "collection0", + "database": "database0", + "collectionName": "coll0" + } + } + ], + "initialData": [ + { + "collectionName": "coll0", + "databaseName": "crud-tests", + "documents": [ + { + "_id": 1, + "x": 11 + }, + { + "_id": 2, + "x": 22 + }, + { + "_id": 3, + "x": 33 + } + ] + } + ], + "tests": [ + { + "description": "BulkWrite updateOne with sort option", + "runOnRequirements": [ + { + "minServerVersion": "8.0" + } + ], + "operations": [ + { + "object": "collection0", + "name": "bulkWrite", + "arguments": { + "requests": [ + { + "updateOne": { + "filter": { + "_id": { + "$gt": 1 + } + }, + "sort": { + "_id": -1 + }, + "update": [ + { + "$set": { + "x": 1 + } + } + ] + } + } + ] + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "update": "coll0", + "updates": [ + { + "q": { + "_id": { + "$gt": 1 + } + }, + "u": [ + { + "$set": { + "x": 1 + } + } + ], + "sort": { + "_id": -1 + }, + "multi": { + "$$unsetOrMatches": false + }, + "upsert": { + "$$unsetOrMatches": false + } + } + ] + } + } + }, + { + "commandSucceededEvent": { + "reply": { + "ok": 1, + "n": 1 + }, + "commandName": "update" + } + } + ] + } + ], + "outcome": [ + { + "collectionName": "coll0", + "databaseName": "crud-tests", + "documents": [ + { + "_id": 1, + "x": 11 + }, + { + "_id": 2, + "x": 22 + }, + { + "_id": 3, + "x": 1 + } + ] + } + ] + }, + { + "description": "BulkWrite updateOne with sort option unsupported (server-side error)", + "runOnRequirements": [ + { + "maxServerVersion": "7.99" + } + ], + "operations": [ + { + "object": "collection0", + "name": "bulkWrite", + "arguments": { + "requests": [ + { + "updateOne": { + "filter": { + "_id": { + "$gt": 1 + } + }, + "sort": { + "_id": -1 + }, + "update": [ + { + "$set": { + "x": 1 + } + } + ] + } + } + ] + }, + "expectError": { + "isClientError": false + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "update": "coll0", + "updates": [ + { + "q": { + "_id": { + "$gt": 1 + } + }, + "u": [ + { + "$set": { + "x": 1 + } + } + ], + "sort": { + "_id": -1 + }, + "multi": 
{ + "$$unsetOrMatches": false + }, + "upsert": { + "$$unsetOrMatches": false + } + } + ] + } + } + } + ] + } + ], + "outcome": [ + { + "collectionName": "coll0", + "databaseName": "crud-tests", + "documents": [ + { + "_id": 1, + "x": 11 + }, + { + "_id": 2, + "x": 22 + }, + { + "_id": 3, + "x": 33 + } + ] + } + ] + } + ] +} diff --git a/test/crud/unified/client-bulkWrite-partialResults.json b/test/crud/unified/client-bulkWrite-partialResults.json new file mode 100644 index 0000000000..b35e94a2ea --- /dev/null +++ b/test/crud/unified/client-bulkWrite-partialResults.json @@ -0,0 +1,540 @@ +{ + "description": "client bulkWrite partial results", + "schemaVersion": "1.4", + "runOnRequirements": [ + { + "minServerVersion": "8.0", + "serverless": "forbid" + } + ], + "createEntities": [ + { + "client": { + "id": "client0" + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "crud-tests" + } + }, + { + "collection": { + "id": "collection0", + "database": "database0", + "collectionName": "coll0" + } + } + ], + "initialData": [ + { + "collectionName": "coll0", + "databaseName": "crud-tests", + "documents": [ + { + "_id": 1, + "x": 11 + } + ] + } + ], + "_yamlAnchors": { + "namespace": "crud-tests.coll0", + "newDocument": { + "_id": 2, + "x": 22 + } + }, + "tests": [ + { + "description": "partialResult is unset when first operation fails during an ordered bulk write (verbose)", + "operations": [ + { + "object": "client0", + "name": "clientBulkWrite", + "arguments": { + "models": [ + { + "insertOne": { + "namespace": "crud-tests.coll0", + "document": { + "_id": 1, + "x": 11 + } + } + }, + { + "insertOne": { + "namespace": "crud-tests.coll0", + "document": { + "_id": 2, + "x": 22 + } + } + } + ], + "ordered": true, + "verboseResults": true + }, + "expectError": { + "expectResult": { + "$$unsetOrMatches": { + "insertedCount": { + "$$exists": false + }, + "upsertedCount": { + "$$exists": false + }, + "matchedCount": { + "$$exists": false + }, + "modifiedCount": { + "$$exists": false + }, + "deletedCount": { + "$$exists": false + }, + "insertResults": { + "$$exists": false + }, + "updateResults": { + "$$exists": false + }, + "deleteResults": { + "$$exists": false + } + } + } + } + } + ] + }, + { + "description": "partialResult is unset when first operation fails during an ordered bulk write (summary)", + "operations": [ + { + "object": "client0", + "name": "clientBulkWrite", + "arguments": { + "models": [ + { + "insertOne": { + "namespace": "crud-tests.coll0", + "document": { + "_id": 1, + "x": 11 + } + } + }, + { + "insertOne": { + "namespace": "crud-tests.coll0", + "document": { + "_id": 2, + "x": 22 + } + } + } + ], + "ordered": true, + "verboseResults": false + }, + "expectError": { + "expectResult": { + "$$unsetOrMatches": { + "insertedCount": { + "$$exists": false + }, + "upsertedCount": { + "$$exists": false + }, + "matchedCount": { + "$$exists": false + }, + "modifiedCount": { + "$$exists": false + }, + "deletedCount": { + "$$exists": false + }, + "insertResults": { + "$$exists": false + }, + "updateResults": { + "$$exists": false + }, + "deleteResults": { + "$$exists": false + } + } + } + } + } + ] + }, + { + "description": "partialResult is set when second operation fails during an ordered bulk write (verbose)", + "operations": [ + { + "object": "client0", + "name": "clientBulkWrite", + "arguments": { + "models": [ + { + "insertOne": { + "namespace": "crud-tests.coll0", + "document": { + "_id": 2, + "x": 22 + } + } + }, + { + "insertOne": { + "namespace": 
"crud-tests.coll0", + "document": { + "_id": 1, + "x": 11 + } + } + } + ], + "ordered": true, + "verboseResults": true + }, + "expectError": { + "expectResult": { + "insertedCount": 1, + "upsertedCount": 0, + "matchedCount": 0, + "modifiedCount": 0, + "deletedCount": 0, + "insertResults": { + "0": { + "insertedId": 2 + } + }, + "updateResults": {}, + "deleteResults": {} + } + } + } + ] + }, + { + "description": "partialResult is set when second operation fails during an ordered bulk write (summary)", + "operations": [ + { + "object": "client0", + "name": "clientBulkWrite", + "arguments": { + "models": [ + { + "insertOne": { + "namespace": "crud-tests.coll0", + "document": { + "_id": 2, + "x": 22 + } + } + }, + { + "insertOne": { + "namespace": "crud-tests.coll0", + "document": { + "_id": 1, + "x": 11 + } + } + } + ], + "ordered": true, + "verboseResults": false + }, + "expectError": { + "expectResult": { + "insertedCount": 1, + "upsertedCount": 0, + "matchedCount": 0, + "modifiedCount": 0, + "deletedCount": 0, + "insertResults": { + "$$unsetOrMatches": {} + }, + "updateResults": { + "$$unsetOrMatches": {} + }, + "deleteResults": { + "$$unsetOrMatches": {} + } + } + } + } + ] + }, + { + "description": "partialResult is unset when all operations fail during an unordered bulk write", + "operations": [ + { + "object": "client0", + "name": "clientBulkWrite", + "arguments": { + "models": [ + { + "insertOne": { + "namespace": "crud-tests.coll0", + "document": { + "_id": 1, + "x": 11 + } + } + }, + { + "insertOne": { + "namespace": "crud-tests.coll0", + "document": { + "_id": 1, + "x": 11 + } + } + } + ], + "ordered": false + }, + "expectError": { + "expectResult": { + "$$unsetOrMatches": { + "insertedCount": { + "$$exists": false + }, + "upsertedCount": { + "$$exists": false + }, + "matchedCount": { + "$$exists": false + }, + "modifiedCount": { + "$$exists": false + }, + "deletedCount": { + "$$exists": false + }, + "insertResults": { + "$$exists": false + }, + "updateResults": { + "$$exists": false + }, + "deleteResults": { + "$$exists": false + } + } + } + } + } + ] + }, + { + "description": "partialResult is set when first operation fails during an unordered bulk write (verbose)", + "operations": [ + { + "object": "client0", + "name": "clientBulkWrite", + "arguments": { + "models": [ + { + "insertOne": { + "namespace": "crud-tests.coll0", + "document": { + "_id": 1, + "x": 11 + } + } + }, + { + "insertOne": { + "namespace": "crud-tests.coll0", + "document": { + "_id": 2, + "x": 22 + } + } + } + ], + "ordered": false, + "verboseResults": true + }, + "expectError": { + "expectResult": { + "insertedCount": 1, + "upsertedCount": 0, + "matchedCount": 0, + "modifiedCount": 0, + "deletedCount": 0, + "insertResults": { + "1": { + "insertedId": 2 + } + }, + "updateResults": {}, + "deleteResults": {} + } + } + } + ] + }, + { + "description": "partialResult is set when first operation fails during an unordered bulk write (summary)", + "operations": [ + { + "object": "client0", + "name": "clientBulkWrite", + "arguments": { + "models": [ + { + "insertOne": { + "namespace": "crud-tests.coll0", + "document": { + "_id": 1, + "x": 11 + } + } + }, + { + "insertOne": { + "namespace": "crud-tests.coll0", + "document": { + "_id": 2, + "x": 22 + } + } + } + ], + "ordered": false, + "verboseResults": false + }, + "expectError": { + "expectResult": { + "insertedCount": 1, + "upsertedCount": 0, + "matchedCount": 0, + "modifiedCount": 0, + "deletedCount": 0, + "insertResults": { + "$$unsetOrMatches": {} + }, + 
"updateResults": { + "$$unsetOrMatches": {} + }, + "deleteResults": { + "$$unsetOrMatches": {} + } + } + } + } + ] + }, + { + "description": "partialResult is set when second operation fails during an unordered bulk write (verbose)", + "operations": [ + { + "object": "client0", + "name": "clientBulkWrite", + "arguments": { + "models": [ + { + "insertOne": { + "namespace": "crud-tests.coll0", + "document": { + "_id": 2, + "x": 22 + } + } + }, + { + "insertOne": { + "namespace": "crud-tests.coll0", + "document": { + "_id": 1, + "x": 11 + } + } + } + ], + "ordered": false, + "verboseResults": true + }, + "expectError": { + "expectResult": { + "insertedCount": 1, + "upsertedCount": 0, + "matchedCount": 0, + "modifiedCount": 0, + "deletedCount": 0, + "insertResults": { + "0": { + "insertedId": 2 + } + }, + "updateResults": {}, + "deleteResults": {} + } + } + } + ] + }, + { + "description": "partialResult is set when first operation fails during an unordered bulk write (summary)", + "operations": [ + { + "object": "client0", + "name": "clientBulkWrite", + "arguments": { + "models": [ + { + "insertOne": { + "namespace": "crud-tests.coll0", + "document": { + "_id": 2, + "x": 22 + } + } + }, + { + "insertOne": { + "namespace": "crud-tests.coll0", + "document": { + "_id": 1, + "x": 11 + } + } + } + ], + "ordered": false, + "verboseResults": false + }, + "expectError": { + "expectResult": { + "insertedCount": 1, + "upsertedCount": 0, + "matchedCount": 0, + "modifiedCount": 0, + "deletedCount": 0, + "insertResults": { + "$$unsetOrMatches": {} + }, + "updateResults": { + "$$unsetOrMatches": {} + }, + "deleteResults": { + "$$unsetOrMatches": {} + } + } + } + } + ] + } + ] +} diff --git a/test/crud/unified/client-bulkWrite-replaceOne-sort.json b/test/crud/unified/client-bulkWrite-replaceOne-sort.json new file mode 100644 index 0000000000..53218c1f48 --- /dev/null +++ b/test/crud/unified/client-bulkWrite-replaceOne-sort.json @@ -0,0 +1,162 @@ +{ + "description": "client bulkWrite updateOne-sort", + "schemaVersion": "1.4", + "runOnRequirements": [ + { + "minServerVersion": "8.0" + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "observeEvents": [ + "commandStartedEvent", + "commandSucceededEvent" + ] + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "crud-tests" + } + }, + { + "collection": { + "id": "collection0", + "database": "database0", + "collectionName": "coll0" + } + } + ], + "initialData": [ + { + "collectionName": "coll0", + "databaseName": "crud-tests", + "documents": [ + { + "_id": 1, + "x": 11 + }, + { + "_id": 2, + "x": 22 + }, + { + "_id": 3, + "x": 33 + } + ] + } + ], + "_yamlAnchors": { + "namespace": "crud-tests.coll0" + }, + "tests": [ + { + "description": "client bulkWrite replaceOne with sort option", + "operations": [ + { + "object": "client0", + "name": "clientBulkWrite", + "arguments": { + "models": [ + { + "replaceOne": { + "namespace": "crud-tests.coll0", + "filter": { + "_id": { + "$gt": 1 + } + }, + "sort": { + "_id": -1 + }, + "replacement": { + "x": 1 + } + } + } + ] + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "commandName": "bulkWrite", + "databaseName": "admin", + "command": { + "bulkWrite": 1, + "ops": [ + { + "update": 0, + "filter": { + "_id": { + "$gt": 1 + } + }, + "updateMods": { + "x": 1 + }, + "sort": { + "_id": -1 + }, + "multi": { + "$$unsetOrMatches": false + }, + "upsert": { + "$$unsetOrMatches": false + } + } + ], + "nsInfo": [ + { + "ns": 
"crud-tests.coll0" + } + ] + } + } + }, + { + "commandSucceededEvent": { + "reply": { + "ok": 1, + "nErrors": 0, + "nMatched": 1, + "nModified": 1 + }, + "commandName": "bulkWrite" + } + } + ] + } + ], + "outcome": [ + { + "collectionName": "coll0", + "databaseName": "crud-tests", + "documents": [ + { + "_id": 1, + "x": 11 + }, + { + "_id": 2, + "x": 22 + }, + { + "_id": 3, + "x": 1 + } + ] + } + ] + } + ] +} diff --git a/test/crud/unified/client-bulkWrite-updateOne-sort.json b/test/crud/unified/client-bulkWrite-updateOne-sort.json new file mode 100644 index 0000000000..4a07b8b97c --- /dev/null +++ b/test/crud/unified/client-bulkWrite-updateOne-sort.json @@ -0,0 +1,166 @@ +{ + "description": "client bulkWrite updateOne-sort", + "schemaVersion": "1.4", + "runOnRequirements": [ + { + "minServerVersion": "8.0" + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "observeEvents": [ + "commandStartedEvent", + "commandSucceededEvent" + ] + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "crud-tests" + } + }, + { + "collection": { + "id": "collection0", + "database": "database0", + "collectionName": "coll0" + } + } + ], + "initialData": [ + { + "collectionName": "coll0", + "databaseName": "crud-tests", + "documents": [ + { + "_id": 1, + "x": 11 + }, + { + "_id": 2, + "x": 22 + }, + { + "_id": 3, + "x": 33 + } + ] + } + ], + "_yamlAnchors": { + "namespace": "crud-tests.coll0" + }, + "tests": [ + { + "description": "client bulkWrite updateOne with sort option", + "operations": [ + { + "object": "client0", + "name": "clientBulkWrite", + "arguments": { + "models": [ + { + "updateOne": { + "namespace": "crud-tests.coll0", + "filter": { + "_id": { + "$gt": 1 + } + }, + "sort": { + "_id": -1 + }, + "update": { + "$inc": { + "x": 1 + } + } + } + } + ] + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "commandName": "bulkWrite", + "databaseName": "admin", + "command": { + "bulkWrite": 1, + "ops": [ + { + "update": 0, + "filter": { + "_id": { + "$gt": 1 + } + }, + "updateMods": { + "$inc": { + "x": 1 + } + }, + "sort": { + "_id": -1 + }, + "multi": { + "$$unsetOrMatches": false + }, + "upsert": { + "$$unsetOrMatches": false + } + } + ], + "nsInfo": [ + { + "ns": "crud-tests.coll0" + } + ] + } + } + }, + { + "commandSucceededEvent": { + "reply": { + "ok": 1, + "nErrors": 0, + "nMatched": 1, + "nModified": 1 + }, + "commandName": "bulkWrite" + } + } + ] + } + ], + "outcome": [ + { + "collectionName": "coll0", + "databaseName": "crud-tests", + "documents": [ + { + "_id": 1, + "x": 11 + }, + { + "_id": 2, + "x": 22 + }, + { + "_id": 3, + "x": 34 + } + ] + } + ] + } + ] +} diff --git a/test/crud/unified/db-aggregate-write-readPreference.json b/test/crud/unified/db-aggregate-write-readPreference.json index 2a81282de8..b6460f001f 100644 --- a/test/crud/unified/db-aggregate-write-readPreference.json +++ b/test/crud/unified/db-aggregate-write-readPreference.json @@ -52,13 +52,6 @@ } } ], - "initialData": [ - { - "collectionName": "coll0", - "databaseName": "db0", - "documents": [] - } - ], "tests": [ { "description": "Database-level aggregate with $out includes read preference for 5.0+ server", @@ -141,17 +134,6 @@ } ] } - ], - "outcome": [ - { - "collectionName": "coll0", - "databaseName": "db0", - "documents": [ - { - "_id": 1 - } - ] - } ] }, { @@ -235,17 +217,6 @@ } ] } - ], - "outcome": [ - { - "collectionName": "coll0", - "databaseName": "db0", - "documents": [ - { - "_id": 1 - } - ] - } ] }, { 
@@ -332,17 +303,6 @@ } ] } - ], - "outcome": [ - { - "collectionName": "coll0", - "databaseName": "db0", - "documents": [ - { - "_id": 1 - } - ] - } ] }, { @@ -429,17 +389,6 @@ } ] } - ], - "outcome": [ - { - "collectionName": "coll0", - "databaseName": "db0", - "documents": [ - { - "_id": 1 - } - ] - } ] } ] diff --git a/test/crud/unified/replaceOne-sort.json b/test/crud/unified/replaceOne-sort.json new file mode 100644 index 0000000000..cf2271dda5 --- /dev/null +++ b/test/crud/unified/replaceOne-sort.json @@ -0,0 +1,232 @@ +{ + "description": "replaceOne-sort", + "schemaVersion": "1.0", + "createEntities": [ + { + "client": { + "id": "client0", + "observeEvents": [ + "commandStartedEvent", + "commandSucceededEvent" + ] + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "crud-tests" + } + }, + { + "collection": { + "id": "collection0", + "database": "database0", + "collectionName": "coll0" + } + } + ], + "initialData": [ + { + "collectionName": "coll0", + "databaseName": "crud-tests", + "documents": [ + { + "_id": 1, + "x": 11 + }, + { + "_id": 2, + "x": 22 + }, + { + "_id": 3, + "x": 33 + } + ] + } + ], + "tests": [ + { + "description": "ReplaceOne with sort option", + "runOnRequirements": [ + { + "minServerVersion": "8.0" + } + ], + "operations": [ + { + "name": "replaceOne", + "object": "collection0", + "arguments": { + "filter": { + "_id": { + "$gt": 1 + } + }, + "sort": { + "_id": -1 + }, + "replacement": { + "x": 1 + } + }, + "expectResult": { + "matchedCount": 1, + "modifiedCount": 1, + "upsertedCount": 0 + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "update": "coll0", + "updates": [ + { + "q": { + "_id": { + "$gt": 1 + } + }, + "u": { + "x": 1 + }, + "sort": { + "_id": -1 + }, + "multi": { + "$$unsetOrMatches": false + }, + "upsert": { + "$$unsetOrMatches": false + } + } + ] + } + } + }, + { + "commandSucceededEvent": { + "reply": { + "ok": 1, + "n": 1 + }, + "commandName": "update" + } + } + ] + } + ], + "outcome": [ + { + "collectionName": "coll0", + "databaseName": "crud-tests", + "documents": [ + { + "_id": 1, + "x": 11 + }, + { + "_id": 2, + "x": 22 + }, + { + "_id": 3, + "x": 1 + } + ] + } + ] + }, + { + "description": "replaceOne with sort option unsupported (server-side error)", + "runOnRequirements": [ + { + "maxServerVersion": "7.99" + } + ], + "operations": [ + { + "name": "replaceOne", + "object": "collection0", + "arguments": { + "filter": { + "_id": { + "$gt": 1 + } + }, + "sort": { + "_id": -1 + }, + "replacement": { + "x": 1 + } + }, + "expectError": { + "isClientError": false + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "update": "coll0", + "updates": [ + { + "q": { + "_id": { + "$gt": 1 + } + }, + "u": { + "x": 1 + }, + "sort": { + "_id": -1 + }, + "multi": { + "$$unsetOrMatches": false + }, + "upsert": { + "$$unsetOrMatches": false + } + } + ] + } + } + } + ] + } + ], + "outcome": [ + { + "collectionName": "coll0", + "databaseName": "crud-tests", + "documents": [ + { + "_id": 1, + "x": 11 + }, + { + "_id": 2, + "x": 22 + }, + { + "_id": 3, + "x": 33 + } + ] + } + ] + } + ] +} diff --git a/test/crud/unified/updateOne-sort.json b/test/crud/unified/updateOne-sort.json new file mode 100644 index 0000000000..8fe4f50b94 --- /dev/null +++ b/test/crud/unified/updateOne-sort.json @@ -0,0 +1,240 @@ +{ + "description": "updateOne-sort", + "schemaVersion": "1.0", + "createEntities": [ 
+ { + "client": { + "id": "client0", + "observeEvents": [ + "commandStartedEvent", + "commandSucceededEvent" + ] + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "crud-tests" + } + }, + { + "collection": { + "id": "collection0", + "database": "database0", + "collectionName": "coll0" + } + } + ], + "initialData": [ + { + "collectionName": "coll0", + "databaseName": "crud-tests", + "documents": [ + { + "_id": 1, + "x": 11 + }, + { + "_id": 2, + "x": 22 + }, + { + "_id": 3, + "x": 33 + } + ] + } + ], + "tests": [ + { + "description": "UpdateOne with sort option", + "runOnRequirements": [ + { + "minServerVersion": "8.0" + } + ], + "operations": [ + { + "name": "updateOne", + "object": "collection0", + "arguments": { + "filter": { + "_id": { + "$gt": 1 + } + }, + "sort": { + "_id": -1 + }, + "update": { + "$inc": { + "x": 1 + } + } + }, + "expectResult": { + "matchedCount": 1, + "modifiedCount": 1, + "upsertedCount": 0 + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "update": "coll0", + "updates": [ + { + "q": { + "_id": { + "$gt": 1 + } + }, + "u": { + "$inc": { + "x": 1 + } + }, + "sort": { + "_id": -1 + }, + "multi": { + "$$unsetOrMatches": false + }, + "upsert": { + "$$unsetOrMatches": false + } + } + ] + } + } + }, + { + "commandSucceededEvent": { + "reply": { + "ok": 1, + "n": 1 + }, + "commandName": "update" + } + } + ] + } + ], + "outcome": [ + { + "collectionName": "coll0", + "databaseName": "crud-tests", + "documents": [ + { + "_id": 1, + "x": 11 + }, + { + "_id": 2, + "x": 22 + }, + { + "_id": 3, + "x": 34 + } + ] + } + ] + }, + { + "description": "updateOne with sort option unsupported (server-side error)", + "runOnRequirements": [ + { + "maxServerVersion": "7.99" + } + ], + "operations": [ + { + "name": "updateOne", + "object": "collection0", + "arguments": { + "filter": { + "_id": { + "$gt": 1 + } + }, + "sort": { + "_id": -1 + }, + "update": { + "$inc": { + "x": 1 + } + } + }, + "expectError": { + "isClientError": false + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "update": "coll0", + "updates": [ + { + "q": { + "_id": { + "$gt": 1 + } + }, + "u": { + "$inc": { + "x": 1 + } + }, + "sort": { + "_id": -1 + }, + "multi": { + "$$unsetOrMatches": false + }, + "upsert": { + "$$unsetOrMatches": false + } + } + ] + } + } + } + ] + } + ], + "outcome": [ + { + "collectionName": "coll0", + "databaseName": "crud-tests", + "documents": [ + { + "_id": 1, + "x": 11 + }, + { + "_id": 2, + "x": 22 + }, + { + "_id": 3, + "x": 33 + } + ] + } + ] + } + ] +} diff --git a/test/utils.py b/test/utils.py index 4575a9fe10..3eac4fa509 100644 --- a/test/utils.py +++ b/test/utils.py @@ -958,10 +958,6 @@ def parse_spec_options(opts): def prepare_spec_arguments(spec, arguments, opname, entity_map, with_txn_callback): for arg_name in list(arguments): c2s = camel_to_snake(arg_name) - # PyMongo accepts sort as list of tuples. - if arg_name == "sort": - sort_dict = arguments[arg_name] - arguments[arg_name] = list(sort_dict.items()) # Named "key" instead not fieldName. 
if arg_name == "fieldName": arguments["key"] = arguments.pop(arg_name) From 8ce21bc1217f4c3c722a50d81c08ec33ca9d46dc Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Thu, 17 Oct 2024 09:18:01 -0500 Subject: [PATCH 044/182] PYTHON-4872 Use shrub.py to generate encryption tasks (#1938) --- .evergreen/config.yml | 387 ++++++++++++++++++++------ .evergreen/scripts/generate_config.py | 88 +++++- 2 files changed, 371 insertions(+), 104 deletions(-) diff --git a/.evergreen/config.yml b/.evergreen/config.yml index c3427e66d0..54a1ff3368 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -2322,32 +2322,6 @@ axes: variables: COVERAGE: "coverage" - # Run encryption tests? - - id: encryption - display_name: "Encryption" - values: - - id: "encryption" - display_name: "Encryption" - tags: ["encryption_tag"] - variables: - test_encryption: true - batchtime: 10080 # 7 days - - id: "encryption_pyopenssl" - display_name: "Encryption PyOpenSSL" - tags: ["encryption_tag"] - variables: - test_encryption: true - test_encryption_pyopenssl: true - batchtime: 10080 # 7 days - # The path to crypt_shared is stored in the $CRYPT_SHARED_LIB_PATH expansion. - - id: "encryption_crypt_shared" - display_name: "Encryption shared lib" - tags: ["encryption_tag"] - variables: - test_encryption: true - test_crypt_shared: true - batchtime: 10080 # 7 days - # Run pyopenssl tests? - id: pyopenssl display_name: "PyOpenSSL" @@ -2864,6 +2838,303 @@ buildvariants: PYTHON_BINARY: C:/python/32/Python39/python.exe SKIP_CSOT_TESTS: "true" +# Encryption tests. +- name: encryption-rhel8-py3.9-auth-ssl + tasks: + - name: .standalone + - name: .replica_set + - name: .sharded_cluster + display_name: Encryption RHEL8 py3.9 Auth SSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + AUTH: auth + SSL: ssl + test_encryption: "true" + PYTHON_BINARY: /opt/python/3.9/bin/python3 + tags: [encryption_tag] +- name: encryption-rhel8-py3.13-auth-ssl + tasks: + - name: .standalone + - name: .replica_set + - name: .sharded_cluster + display_name: Encryption RHEL8 py3.13 Auth SSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + AUTH: auth + SSL: ssl + test_encryption: "true" + PYTHON_BINARY: /opt/python/3.13/bin/python3 + tags: [encryption_tag] +- name: encryption-rhel8-pypy3.10-auth-ssl + tasks: + - name: .standalone + - name: .replica_set + - name: .sharded_cluster + display_name: Encryption RHEL8 pypy3.10 Auth SSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + AUTH: auth + SSL: ssl + test_encryption: "true" + PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 + tags: [encryption_tag] +- name: encryption-crypt_shared-rhel8-py3.9-auth-ssl + tasks: + - name: .standalone + - name: .replica_set + - name: .sharded_cluster + display_name: Encryption crypt_shared RHEL8 py3.9 Auth SSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + AUTH: auth + SSL: ssl + test_encryption: "true" + test_crypt_shared: "true" + PYTHON_BINARY: /opt/python/3.9/bin/python3 + tags: [encryption_tag] +- name: encryption-crypt_shared-rhel8-py3.13-auth-ssl + tasks: + - name: .standalone + - name: .replica_set + - name: .sharded_cluster + display_name: Encryption crypt_shared RHEL8 py3.13 Auth SSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + AUTH: auth + SSL: ssl + test_encryption: "true" + test_crypt_shared: "true" + PYTHON_BINARY: /opt/python/3.13/bin/python3 + tags: [encryption_tag] +- name: encryption-crypt_shared-rhel8-pypy3.10-auth-ssl + tasks: + - name: .standalone + - name: .replica_set + - name: 
.sharded_cluster + display_name: Encryption crypt_shared RHEL8 pypy3.10 Auth SSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + AUTH: auth + SSL: ssl + test_encryption: "true" + test_crypt_shared: "true" + PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 + tags: [encryption_tag] +- name: encryption-pyopenssl-rhel8-py3.9-auth-ssl + tasks: + - name: .standalone + - name: .replica_set + - name: .sharded_cluster + display_name: Encryption PyOpenSSL RHEL8 py3.9 Auth SSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + AUTH: auth + SSL: ssl + test_encryption: "true" + test_encryption_pyopenssl: "true" + PYTHON_BINARY: /opt/python/3.9/bin/python3 + tags: [encryption_tag] +- name: encryption-pyopenssl-rhel8-py3.13-auth-ssl + tasks: + - name: .standalone + - name: .replica_set + - name: .sharded_cluster + display_name: Encryption PyOpenSSL RHEL8 py3.13 Auth SSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + AUTH: auth + SSL: ssl + test_encryption: "true" + test_encryption_pyopenssl: "true" + PYTHON_BINARY: /opt/python/3.13/bin/python3 + tags: [encryption_tag] +- name: encryption-pyopenssl-rhel8-pypy3.10-auth-ssl + tasks: + - name: .standalone + - name: .replica_set + - name: .sharded_cluster + display_name: Encryption PyOpenSSL RHEL8 pypy3.10 Auth SSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + AUTH: auth + SSL: ssl + test_encryption: "true" + test_encryption_pyopenssl: "true" + PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 + tags: [encryption_tag] +- name: encryption-rhel8-py3.10-auth-ssl + tasks: + - name: .replica_set + display_name: Encryption RHEL8 py3.10 Auth SSL + run_on: + - rhel87-small + expansions: + AUTH: auth + SSL: ssl + test_encryption: "true" + PYTHON_BINARY: /opt/python/3.10/bin/python3 +- name: encryption-crypt_shared-rhel8-py3.11-auth-nossl + tasks: + - name: .replica_set + display_name: Encryption crypt_shared RHEL8 py3.11 Auth NoSSL + run_on: + - rhel87-small + expansions: + AUTH: auth + SSL: nossl + test_encryption: "true" + test_crypt_shared: "true" + PYTHON_BINARY: /opt/python/3.11/bin/python3 +- name: encryption-pyopenssl-rhel8-py3.12-auth-ssl + tasks: + - name: .replica_set + display_name: Encryption PyOpenSSL RHEL8 py3.12 Auth SSL + run_on: + - rhel87-small + expansions: + AUTH: auth + SSL: ssl + test_encryption: "true" + TEST_ENCRYPTION_PYOPENSSL: "true" + PYTHON_BINARY: /opt/python/3.12/bin/python3 +- name: encryption-rhel8-pypy3.9-auth-nossl + tasks: + - name: .replica_set + display_name: Encryption RHEL8 pypy3.9 Auth NoSSL + run_on: + - rhel87-small + expansions: + AUTH: auth + SSL: nossl + test_encryption: "true" + PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 +- name: encryption-macos-py3.9-auth-ssl + tasks: + - name: .latest .replica_set + display_name: Encryption macOS py3.9 Auth SSL + run_on: + - macos-14 + batchtime: 10080 + expansions: + AUTH: auth + SSL: ssl + test_encryption: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 + tags: [encryption_tag] +- name: encryption-macos-py3.13-auth-nossl + tasks: + - name: .latest .replica_set + display_name: Encryption macOS py3.13 Auth NoSSL + run_on: + - macos-14 + batchtime: 10080 + expansions: + AUTH: auth + SSL: nossl + test_encryption: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 + tags: [encryption_tag] +- name: encryption-crypt_shared-macos-py3.9-auth-ssl + tasks: + - name: .latest .replica_set + display_name: Encryption crypt_shared macOS py3.9 Auth SSL + run_on: + - 
macos-14 + batchtime: 10080 + expansions: + AUTH: auth + SSL: ssl + test_encryption: "true" + test_crypt_shared: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 + tags: [encryption_tag] +- name: encryption-crypt_shared-macos-py3.13-auth-nossl + tasks: + - name: .latest .replica_set + display_name: Encryption crypt_shared macOS py3.13 Auth NoSSL + run_on: + - macos-14 + batchtime: 10080 + expansions: + AUTH: auth + SSL: nossl + test_encryption: "true" + test_crypt_shared: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 + tags: [encryption_tag] +- name: encryption-win64-py3.9-auth-ssl + tasks: + - name: .latest .replica_set + display_name: Encryption Win64 py3.9 Auth SSL + run_on: + - windows-64-vsMulti-small + batchtime: 10080 + expansions: + AUTH: auth + SSL: ssl + test_encryption: "true" + PYTHON_BINARY: C:/python/Python39/python.exe + tags: [encryption_tag] +- name: encryption-win64-py3.13-auth-nossl + tasks: + - name: .latest .replica_set + display_name: Encryption Win64 py3.13 Auth NoSSL + run_on: + - windows-64-vsMulti-small + batchtime: 10080 + expansions: + AUTH: auth + SSL: nossl + test_encryption: "true" + PYTHON_BINARY: C:/python/Python313/python.exe + tags: [encryption_tag] +- name: encryption-crypt_shared-win64-py3.9-auth-ssl + tasks: + - name: .latest .replica_set + display_name: Encryption crypt_shared Win64 py3.9 Auth SSL + run_on: + - windows-64-vsMulti-small + batchtime: 10080 + expansions: + AUTH: auth + SSL: ssl + test_encryption: "true" + test_crypt_shared: "true" + PYTHON_BINARY: C:/python/Python39/python.exe + tags: [encryption_tag] +- name: encryption-crypt_shared-win64-py3.13-auth-nossl + tasks: + - name: .latest .replica_set + display_name: Encryption crypt_shared Win64 py3.13 Auth NoSSL + run_on: + - windows-64-vsMulti-small + batchtime: 10080 + expansions: + AUTH: auth + SSL: nossl + test_encryption: "true" + test_crypt_shared: "true" + PYTHON_BINARY: C:/python/Python313/python.exe + tags: [encryption_tag] + - matrix_name: "tests-fips" matrix_spec: platform: @@ -2874,33 +3145,6 @@ buildvariants: tasks: - "test-fips-standalone" -- matrix_name: "test-macos-encryption" - matrix_spec: - platform: - - macos - auth: "auth" - ssl: "nossl" - encryption: "*" - display_name: "${encryption} ${platform} ${auth} ${ssl}" - tasks: "test-latest-replica_set" - rules: - - if: - encryption: ["encryption", "encryption_crypt_shared"] - platform: macos - auth: "auth" - ssl: "nossl" - then: - add_tasks: &encryption-server-versions - - ".rapid" - - ".latest" - - ".8.0" - - ".7.0" - - ".6.0" - - ".5.0" - - ".4.4" - - ".4.2" - - ".4.0" - # Test one server version with zSeries, POWER8, and ARM. 
- matrix_name: "test-different-cpu-architectures" matrix_spec: @@ -2954,26 +3198,6 @@ buildvariants: tasks: - '.replica_set' -- matrix_name: "tests-python-version-rhel8-test-encryption" - matrix_spec: - platform: rhel8 - python-version: "*" - auth-ssl: noauth-nossl -# TODO: dependency error for 'coverage-report' task: -# dependency tests-python-version-rhel62-test-encryption_.../test-2.6-standalone is not present in the project config -# coverage: "*" - encryption: "*" - display_name: "${encryption} ${python-version} ${platform} ${auth-ssl}" - tasks: "test-latest-replica_set" - rules: - - if: - encryption: ["encryption", "encryption_crypt_shared"] - platform: rhel8 - auth-ssl: noauth-nossl - python-version: "*" - then: - add_tasks: *encryption-server-versions - - matrix_name: "tests-python-version-rhel8-without-c-extensions" matrix_spec: platform: rhel8 @@ -3057,23 +3281,6 @@ buildvariants: tasks: - ".5.0" -- matrix_name: "tests-windows-encryption" - matrix_spec: - platform: windows - python-version-windows: "*" - auth-ssl: "*" - encryption: "*" - display_name: "${encryption} ${platform} ${python-version-windows} ${auth-ssl}" - tasks: "test-latest-replica_set" - rules: - - if: - encryption: ["encryption", "encryption_crypt_shared"] - platform: windows - python-version-windows: "*" - auth-ssl: "*" - then: - add_tasks: *encryption-server-versions - # Storage engine tests on RHEL 8.4 (x86_64) with Python 3.9. - matrix_name: "tests-storage-engines" matrix_spec: diff --git a/.evergreen/scripts/generate_config.py b/.evergreen/scripts/generate_config.py index 044303ad8f..dcd97b093e 100644 --- a/.evergreen/scripts/generate_config.py +++ b/.evergreen/scripts/generate_config.py @@ -45,18 +45,13 @@ class Host: name: str run_on: str display_name: str - expansions: dict[str, str] -_macos_expansions = dict( # CSOT tests are unreliable on slow hosts. 
- SKIP_CSOT_TESTS="true" -) - -HOSTS["rhel8"] = Host("rhel8", "rhel87-small", "RHEL8", dict()) -HOSTS["win64"] = Host("win64", "windows-64-vsMulti-small", "Win64", _macos_expansions) -HOSTS["win32"] = Host("win32", "windows-64-vsMulti-small", "Win32", _macos_expansions) -HOSTS["macos"] = Host("macos", "macos-14", "macOS", _macos_expansions) -HOSTS["macos-arm64"] = Host("macos-arm64", "macos-14-arm64", "macOS Arm64", _macos_expansions) +HOSTS["rhel8"] = Host("rhel8", "rhel87-small", "RHEL8") +HOSTS["win64"] = Host("win64", "windows-64-vsMulti-small", "Win64") +HOSTS["win32"] = Host("win32", "windows-64-vsMulti-small", "Win32") +HOSTS["macos"] = Host("macos", "macos-14", "macOS") +HOSTS["macos-arm64"] = Host("macos-arm64", "macos-14-arm64", "macOS Arm64") ############## @@ -84,7 +79,6 @@ def create_variant( expansions["PYTHON_BINARY"] = get_python_binary(python, host) if version: expansions["VERSION"] = version - expansions.update(HOSTS[host].expansions) expansions = expansions or None return BuildVariant( name=name, @@ -129,7 +123,7 @@ def get_display_name(base: str, host: str, **kwargs) -> str: elif key.lower() in DISPLAY_LOOKUP: name = DISPLAY_LOOKUP[key.lower()][value] else: - raise ValueError(f"Missing display handling for {key}") + continue display_name = f"{display_name} {name}" return display_name @@ -235,7 +229,7 @@ def create_server_variants() -> list[BuildVariant]: zip_cycle(MIN_MAX_PYTHON, AUTH_SSLS, TOPOLOGIES), SYNCS ): test_suite = "default" if sync == "sync" else "default_async" - expansions = dict(AUTH=auth, SSL=ssl, TEST_SUITES=test_suite) + expansions = dict(AUTH=auth, SSL=ssl, TEST_SUITES=test_suite, SKIP_CSOT_TESTS="true") display_name = get_display_name("Test", host, python=python, **expansions) variant = create_variant( [f".{topology}"], @@ -249,8 +243,74 @@ def create_server_variants() -> list[BuildVariant]: return variants +def create_encryption_variants() -> list[BuildVariant]: + variants = [] + tags = ["encryption_tag"] + batchtime = BATCHTIME_WEEK + + def get_encryption_expansions(encryption, ssl="ssl"): + expansions = dict(AUTH="auth", SSL=ssl, test_encryption="true") + if "crypt_shared" in encryption: + expansions["test_crypt_shared"] = "true" + if "PyOpenSSL" in encryption: + expansions["test_encryption_pyopenssl"] = "true" + return expansions + + host = "rhel8" + + # Test against all server versions and topolgies for the three main python versions. + encryptions = ["Encryption", "Encryption crypt_shared", "Encryption PyOpenSSL"] + for encryption, python in product(encryptions, [*MIN_MAX_PYTHON, PYPYS[-1]]): + expansions = get_encryption_expansions(encryption) + display_name = get_display_name(encryption, host, python=python, **expansions) + variant = create_variant( + [f".{t}" for t in TOPOLOGIES], + display_name, + python=python, + host=host, + expansions=expansions, + batchtime=batchtime, + tags=tags, + ) + variants.append(variant) + + # Test the rest of the pythons on linux for all server versions. + for encryption, python, ssl in zip_cycle( + encryptions, CPYTHONS[1:-1] + PYPYS[:-1], ["ssl", "nossl"] + ): + expansions = get_encryption_expansions(encryption, ssl) + display_name = get_display_name(encryption, host, python=python, **expansions) + variant = create_variant( + [".replica_set"], + display_name, + python=python, + host=host, + expansions=expansions, + ) + variants.append(variant) + + # Test on macos and linux on one server version and topology for min and max python. 
+ encryptions = ["Encryption", "Encryption crypt_shared"] + task_names = [".latest .replica_set"] + for host, encryption, python in product(["macos", "win64"], encryptions, MIN_MAX_PYTHON): + ssl = "ssl" if python == CPYTHONS[0] else "nossl" + expansions = get_encryption_expansions(encryption, ssl) + display_name = get_display_name(encryption, host, python=python, **expansions) + variant = create_variant( + task_names, + display_name, + python=python, + host=host, + expansions=expansions, + batchtime=batchtime, + tags=tags, + ) + variants.append(variant) + return variants + + ################## # Generate Config ################## -generate_yaml(variants=create_server_variants()) +generate_yaml(variants=create_encryption_variants()) From a62ade864ddc07f1c0ee2782ef07ecfbf07fefd7 Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Thu, 17 Oct 2024 11:32:39 -0400 Subject: [PATCH 045/182] PYTHON-4874 - Add KMS support for async Windows (#1939) --- pymongo/network_layer.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/pymongo/network_layer.py b/pymongo/network_layer.py index d14a21f41d..7a325853c8 100644 --- a/pymongo/network_layer.py +++ b/pymongo/network_layer.py @@ -205,7 +205,7 @@ async def _async_sendall_ssl( total_sent += sent async def _async_receive_ssl( - conn: _sslConn, length: int, dummy: AbstractEventLoop + conn: _sslConn, length: int, dummy: AbstractEventLoop, once: Optional[bool] = False ) -> memoryview: mv = memoryview(bytearray(length)) total_read = 0 @@ -215,6 +215,9 @@ async def _async_receive_ssl( while total_read < length: try: read = conn.recv_into(mv[total_read:]) + # KMS responses update their expected size after the first batch, stop reading after one loop + if once: + return mv[:read] if read == 0: raise OSError("connection closed") except BLOCKING_IO_ERRORS: From 79033bc0b9a6e404dc7680a03d856f12942ec720 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Thu, 17 Oct 2024 10:33:44 -0500 Subject: [PATCH 046/182] Revert "PYTHON-4765 Resync server-selection spec" (#1940) --- .../operation-id.json | 4 +- .../server_selection_logging/replica-set.json | 2 +- test/server_selection_logging/sharded.json | 2 +- test/server_selection_logging/standalone.json | 930 +++++++++++++++++- 4 files changed, 932 insertions(+), 6 deletions(-) diff --git a/test/server_selection_logging/operation-id.json b/test/server_selection_logging/operation-id.json index 72ebff60d8..ccc2623166 100644 --- a/test/server_selection_logging/operation-id.json +++ b/test/server_selection_logging/operation-id.json @@ -197,7 +197,7 @@ } }, { - "level": "info", + "level": "debug", "component": "serverSelection", "data": { "message": "Waiting for suitable server to become available", @@ -383,7 +383,7 @@ } }, { - "level": "info", + "level": "debug", "component": "serverSelection", "data": { "message": "Waiting for suitable server to become available", diff --git a/test/server_selection_logging/replica-set.json b/test/server_selection_logging/replica-set.json index 5eba784bf2..830b1ea51a 100644 --- a/test/server_selection_logging/replica-set.json +++ b/test/server_selection_logging/replica-set.json @@ -184,7 +184,7 @@ } }, { - "level": "info", + "level": "debug", "component": "serverSelection", "data": { "message": "Waiting for suitable server to become available", diff --git a/test/server_selection_logging/sharded.json b/test/server_selection_logging/sharded.json index d42fba9100..346c050f9e 100644 --- a/test/server_selection_logging/sharded.json +++ b/test/server_selection_logging/sharded.json 
@@ -193,7 +193,7 @@ } }, { - "level": "info", + "level": "debug", "component": "serverSelection", "data": { "message": "Waiting for suitable server to become available", diff --git a/test/server_selection_logging/standalone.json b/test/server_selection_logging/standalone.json index 3b3eddd841..3152d0bbf3 100644 --- a/test/server_selection_logging/standalone.json +++ b/test/server_selection_logging/standalone.json @@ -47,9 +47,29 @@ } } ], + "initialData": [ + { + "collectionName": "server-selection", + "databaseName": "logging-tests", + "documents": [ + { + "_id": 1, + "x": 11 + }, + { + "_id": 2, + "x": 22 + }, + { + "_id": 3, + "x": 33 + } + ] + } + ], "tests": [ { - "description": "A successful operation", + "description": "A successful insert operation", "operations": [ { "name": "waitForEvent", @@ -191,7 +211,7 @@ } }, { - "level": "info", + "level": "debug", "component": "serverSelection", "data": { "message": "Waiting for suitable server to become available", @@ -230,6 +250,912 @@ ] } ] + }, + { + "description": "A successful find operation", + "operations": [ + { + "name": "waitForEvent", + "object": "testRunner", + "arguments": { + "client": "client", + "event": { + "topologyDescriptionChangedEvent": {} + }, + "count": 2 + } + }, + { + "name": "findOne", + "object": "collection", + "arguments": { + "filter": { + "x": 1 + } + } + } + + ], + "expectLogMessages": [ + { + "client": "client", + "messages": [ + { + "level": "debug", + "component": "serverSelection", + "data": { + "message": "Server selection started", + "selector": { + "$$exists": true + }, + "operation": "find", + "topologyDescription": { + "$$exists": true + } + } + }, + { + "level": "debug", + "component": "serverSelection", + "data": { + "message": "Server selection succeeded", + "selector": { + "$$exists": true + }, + "operation": "find", + "topologyDescription": { + "$$exists": true + }, + "serverHost": { + "$$type": "string" + }, + "serverPort": { + "$$type": [ + "int", + "long" + ] + } + } + } + ] + } + ] + }, + { + "description": "A successful findAndModify operation", + "operations": [ + { + "name": "waitForEvent", + "object": "testRunner", + "arguments": { + "client": "client", + "event": { + "topologyDescriptionChangedEvent": {} + }, + "count": 2 + } + }, + { + "name": "findOneAndReplace", + "object": "collection", + "arguments": { + "filter": { + "x": 1 + }, + "replacement": { + "x": 11 + } + } + } + ], + "expectLogMessages": [ + { + "client": "client", + "messages": [ + { + "level": "debug", + "component": "serverSelection", + "data": { + "message": "Server selection started", + "selector": { + "$$exists": true + }, + "operation": "findAndModify", + "topologyDescription": { + "$$exists": true + } + } + }, + { + "level": "debug", + "component": "serverSelection", + "data": { + "message": "Server selection succeeded", + "selector": { + "$$exists": true + }, + "operation": "findAndModify", + "topologyDescription": { + "$$exists": true + }, + "serverHost": { + "$$type": "string" + }, + "serverPort": { + "$$type": [ + "int", + "long" + ] + } + } + } + ] + } + ] + }, + { + "description": "A successful find and getMore operation", + "operations": [ + { + "name": "waitForEvent", + "object": "testRunner", + "arguments": { + "client": "client", + "event": { + "topologyDescriptionChangedEvent": {} + }, + "count": 2 + } + }, + { + "name": "find", + "object": "collection", + "arguments": { + "batchSize": 3 + } + } + ], + "expectLogMessages": [ + { + "client": "client", + "messages": [ + { + "level": "debug", + 
"component": "serverSelection", + "data": { + "message": "Server selection started", + "selector": { + "$$exists": true + }, + "operation": "find", + "topologyDescription": { + "$$exists": true + } + } + }, + { + "level": "debug", + "component": "serverSelection", + "data": { + "message": "Server selection succeeded", + "selector": { + "$$exists": true + }, + "operation": "find", + "topologyDescription": { + "$$exists": true + }, + "serverHost": { + "$$type": "string" + }, + "serverPort": { + "$$type": [ + "int", + "long" + ] + } + } + }, + { + "level": "debug", + "component": "serverSelection", + "data": { + "message": "Server selection started", + "selector": { + "$$exists": true + }, + "operation": "getMore", + "topologyDescription": { + "$$exists": true + } + } + }, + { + "level": "debug", + "component": "serverSelection", + "data": { + "message": "Server selection succeeded", + "selector": { + "$$exists": true + }, + "operation": "getMore", + "topologyDescription": { + "$$exists": true + }, + "serverHost": { + "$$type": "string" + }, + "serverPort": { + "$$type": [ + "int", + "long" + ] + } + } + } + ] + } + ] + }, + { + "description": "A successful aggregate operation", + "operations": [ + { + "name": "waitForEvent", + "object": "testRunner", + "arguments": { + "client": "client", + "event": { + "topologyDescriptionChangedEvent": {} + }, + "count": 2 + } + }, + { + "name": "aggregate", + "object": "collection", + "arguments": { + "pipeline": [ + { + "$match": { + "_id": { + "$gt": 1 + } + } + } + ] + } + } + ], + "expectLogMessages": [ + { + "client": "client", + "messages": [ + { + "level": "debug", + "component": "serverSelection", + "data": { + "message": "Server selection started", + "selector": { + "$$exists": true + }, + "operation": "aggregate", + "topologyDescription": { + "$$exists": true + } + } + }, + { + "level": "debug", + "component": "serverSelection", + "data": { + "message": "Server selection succeeded", + "selector": { + "$$exists": true + }, + "operation": "aggregate", + "topologyDescription": { + "$$exists": true + }, + "serverHost": { + "$$type": "string" + }, + "serverPort": { + "$$type": [ + "int", + "long" + ] + } + } + } + ] + } + ] + }, + { + "description": "A successful count operation", + "operations": [ + { + "name": "waitForEvent", + "object": "testRunner", + "arguments": { + "client": "client", + "event": { + "topologyDescriptionChangedEvent": {} + }, + "count": 2 + } + }, + { + "name": "countDocuments", + "object": "collection", + "arguments": { + "filter": {} + } + } + ], + "expectLogMessages": [ + { + "client": "client", + "messages": [ + { + "level": "debug", + "component": "serverSelection", + "data": { + "message": "Server selection started", + "selector": { + "$$exists": true + }, + "operation": "count", + "topologyDescription": { + "$$exists": true + } + } + }, + { + "level": "debug", + "component": "serverSelection", + "data": { + "message": "Server selection succeeded", + "selector": { + "$$exists": true + }, + "operation": "count", + "topologyDescription": { + "$$exists": true + }, + "serverHost": { + "$$type": "string" + }, + "serverPort": { + "$$type": [ + "int", + "long" + ] + } + } + } + ] + } + ] + }, + { + "description": "A successful distinct operation", + "operations": [ + { + "name": "waitForEvent", + "object": "testRunner", + "arguments": { + "client": "client", + "event": { + "topologyDescriptionChangedEvent": {} + }, + "count": 2 + } + }, + { + "name": "distinct", + "object": "collection", + "arguments": { + "fieldName": "x", + 
"filter": {} + } + } + ], + "expectLogMessages": [ + { + "client": "client", + "messages": [ + { + "level": "debug", + "component": "serverSelection", + "data": { + "message": "Server selection started", + "selector": { + "$$exists": true + }, + "operation": "distinct", + "topologyDescription": { + "$$exists": true + } + } + }, + { + "level": "debug", + "component": "serverSelection", + "data": { + "message": "Server selection succeeded", + "selector": { + "$$exists": true + }, + "operation": "distinct", + "topologyDescription": { + "$$exists": true + }, + "serverHost": { + "$$type": "string" + }, + "serverPort": { + "$$type": [ + "int", + "long" + ] + } + } + } + ] + } + ] + }, + { + "description": "Successful collection management operations", + "operations": [ + { + "name": "waitForEvent", + "object": "testRunner", + "arguments": { + "client": "client", + "event": { + "topologyDescriptionChangedEvent": {} + }, + "count": 2 + } + }, + { + "name": "createCollection", + "object": "database", + "arguments": { + "collection": "foo" + } + }, + { + "name": "listCollections", + "object": "database" + }, + { + "name": "dropCollection", + "object": "database", + "arguments": { + "collection": "foo" + } + } + ], + "expectLogMessages": [ + { + "client": "client", + "messages": [ + { + "level": "debug", + "component": "serverSelection", + "data": { + "message": "Server selection started", + "selector": { + "$$exists": true + }, + "operation": "create", + "topologyDescription": { + "$$exists": true + } + } + }, + { + "level": "debug", + "component": "serverSelection", + "data": { + "message": "Server selection succeeded", + "selector": { + "$$exists": true + }, + "operation": "create", + "topologyDescription": { + "$$exists": true + }, + "serverHost": { + "$$type": "string" + }, + "serverPort": { + "$$type": [ + "int", + "long" + ] + } + } + }, + { + "level": "debug", + "component": "serverSelection", + "data": { + "message": "Server selection started", + "selector": { + "$$exists": true + }, + "operation": "listCollections", + "topologyDescription": { + "$$exists": true + } + } + }, + { + "level": "debug", + "component": "serverSelection", + "data": { + "message": "Server selection succeeded", + "selector": { + "$$exists": true + }, + "operation": "listCollections", + "topologyDescription": { + "$$exists": true + }, + "serverHost": { + "$$type": "string" + }, + "serverPort": { + "$$type": [ + "int", + "long" + ] + } + } + }, + { + "level": "debug", + "component": "serverSelection", + "data": { + "message": "Server selection started", + "selector": { + "$$exists": true + }, + "operation": "drop", + "topologyDescription": { + "$$exists": true + } + } + }, + { + "level": "debug", + "component": "serverSelection", + "data": { + "message": "Server selection succeeded", + "selector": { + "$$exists": true + }, + "operation": "drop", + "topologyDescription": { + "$$exists": true + }, + "serverHost": { + "$$type": "string" + }, + "serverPort": { + "$$type": [ + "int", + "long" + ] + } + } + } + ] + } + ] + }, + { + "description": "Successful index operations", + "operations": [ + { + "name": "waitForEvent", + "object": "testRunner", + "arguments": { + "client": "client", + "event": { + "topologyDescriptionChangedEvent": {} + }, + "count": 2 + } + }, + { + "name": "createIndex", + "object": "collection", + "arguments": { + "keys": { + "x": 1 + }, + "name": "x_1" + } + }, + { + "name": "listIndexes", + "object": "collection" + }, + { + "name": "dropIndex", + "object": "collection", + "arguments": { + "name": 
"x_1" + } + } + ], + "expectLogMessages": [ + { + "client": "client", + "messages": [ + { + "level": "debug", + "component": "serverSelection", + "data": { + "message": "Server selection started", + "selector": { + "$$exists": true + }, + "operation": "createIndexes", + "topologyDescription": { + "$$exists": true + } + } + }, + { + "level": "debug", + "component": "serverSelection", + "data": { + "message": "Server selection succeeded", + "selector": { + "$$exists": true + }, + "operation": "createIndexes", + "topologyDescription": { + "$$exists": true + }, + "serverHost": { + "$$type": "string" + }, + "serverPort": { + "$$type": [ + "int", + "long" + ] + } + } + }, + { + "level": "debug", + "component": "serverSelection", + "data": { + "message": "Server selection started", + "selector": { + "$$exists": true + }, + "operation": "listIndexes", + "topologyDescription": { + "$$exists": true + } + } + }, + { + "level": "debug", + "component": "serverSelection", + "data": { + "message": "Server selection succeeded", + "selector": { + "$$exists": true + }, + "operation": "listIndexes", + "topologyDescription": { + "$$exists": true + }, + "serverHost": { + "$$type": "string" + }, + "serverPort": { + "$$type": [ + "int", + "long" + ] + } + } + }, + { + "level": "debug", + "component": "serverSelection", + "data": { + "message": "Server selection started", + "selector": { + "$$exists": true + }, + "operation": "dropIndexes", + "topologyDescription": { + "$$exists": true + } + } + }, + { + "level": "debug", + "component": "serverSelection", + "data": { + "message": "Server selection succeeded", + "selector": { + "$$exists": true + }, + "operation": "dropIndexes", + "topologyDescription": { + "$$exists": true + }, + "serverHost": { + "$$type": "string" + }, + "serverPort": { + "$$type": [ + "int", + "long" + ] + } + } + } + ] + } + ] + }, + { + "description": "A successful update operation", + "operations": [ + { + "name": "waitForEvent", + "object": "testRunner", + "arguments": { + "client": "client", + "event": { + "topologyDescriptionChangedEvent": {} + }, + "count": 2 + } + }, + { + "name": "updateOne", + "object": "collection", + "arguments": { + "filter": { + "x": 1 + }, + "update": { + "$inc": { + "x": 1 + } + } + } + } + ], + "expectLogMessages": [ + { + "client": "client", + "messages": [ + { + "level": "debug", + "component": "serverSelection", + "data": { + "message": "Server selection started", + "selector": { + "$$exists": true + }, + "operation": "update", + "topologyDescription": { + "$$exists": true + } + } + }, + { + "level": "debug", + "component": "serverSelection", + "data": { + "message": "Server selection succeeded", + "selector": { + "$$exists": true + }, + "operation": "update", + "topologyDescription": { + "$$exists": true + }, + "serverHost": { + "$$type": "string" + }, + "serverPort": { + "$$type": [ + "int", + "long" + ] + } + } + } + ] + } + ] + }, + { + "description": "A successful delete operation", + "operations": [ + { + "name": "waitForEvent", + "object": "testRunner", + "arguments": { + "client": "client", + "event": { + "topologyDescriptionChangedEvent": {} + }, + "count": 2 + } + }, + { + "name": "deleteOne", + "object": "collection", + "arguments": { + "filter": { + "x": 1 + } + } + } + ], + "expectLogMessages": [ + { + "client": "client", + "messages": [ + { + "level": "debug", + "component": "serverSelection", + "data": { + "message": "Server selection started", + "selector": { + "$$exists": true + }, + "operation": "delete", + "topologyDescription": { + 
"$$exists": true + } + } + }, + { + "level": "debug", + "component": "serverSelection", + "data": { + "message": "Server selection succeeded", + "selector": { + "$$exists": true + }, + "operation": "delete", + "topologyDescription": { + "$$exists": true + }, + "serverHost": { + "$$type": "string" + }, + "serverPort": { + "$$type": [ + "int", + "long" + ] + } + } + } + ] + } + ] } ] } From 257aa2483be980005d4a54f97025baebcb3102f3 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Thu, 17 Oct 2024 11:01:47 -0500 Subject: [PATCH 047/182] PYTHON-4878 Use shrub.py for load balancer tests (#1941) --- .evergreen/config.yml | 218 +++++++++++++++++++++++--- .evergreen/scripts/generate_config.py | 38 ++++- 2 files changed, 230 insertions(+), 26 deletions(-) diff --git a/.evergreen/config.yml b/.evergreen/config.yml index 54a1ff3368..ae7c0a6590 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -2354,16 +2354,6 @@ axes: variables: ORCHESTRATION_FILE: "versioned-api-testing.json" - # Run load balancer tests? - - id: loadbalancer - display_name: "Load Balancer" - values: - - id: "enabled" - display_name: "Load Balancer" - variables: - test_loadbalancer: true - batchtime: 10080 # 7 days - - id: serverless display_name: "Serverless" values: @@ -3580,6 +3570,203 @@ buildvariants: VERSION: "8.0" PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 +# Load balancer tests +- name: load-balancer-rhel8-v6.0-py3.9-auth-ssl + tasks: + - name: load-balancer-test + display_name: Load Balancer RHEL8 v6.0 py3.9 Auth SSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + VERSION: "6.0" + AUTH: auth + SSL: ssl + test_loadbalancer: "true" + PYTHON_BINARY: /opt/python/3.9/bin/python3 +- name: load-balancer-rhel8-v6.0-py3.10-noauth-ssl + tasks: + - name: load-balancer-test + display_name: Load Balancer RHEL8 v6.0 py3.10 NoAuth SSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + VERSION: "6.0" + AUTH: noauth + SSL: ssl + test_loadbalancer: "true" + PYTHON_BINARY: /opt/python/3.10/bin/python3 +- name: load-balancer-rhel8-v6.0-py3.11-noauth-nossl + tasks: + - name: load-balancer-test + display_name: Load Balancer RHEL8 v6.0 py3.11 NoAuth NoSSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + VERSION: "6.0" + AUTH: noauth + SSL: nossl + test_loadbalancer: "true" + PYTHON_BINARY: /opt/python/3.11/bin/python3 +- name: load-balancer-rhel8-v7.0-py3.12-auth-ssl + tasks: + - name: load-balancer-test + display_name: Load Balancer RHEL8 v7.0 py3.12 Auth SSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + VERSION: "7.0" + AUTH: auth + SSL: ssl + test_loadbalancer: "true" + PYTHON_BINARY: /opt/python/3.12/bin/python3 +- name: load-balancer-rhel8-v7.0-py3.13-noauth-ssl + tasks: + - name: load-balancer-test + display_name: Load Balancer RHEL8 v7.0 py3.13 NoAuth SSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + VERSION: "7.0" + AUTH: noauth + SSL: ssl + test_loadbalancer: "true" + PYTHON_BINARY: /opt/python/3.13/bin/python3 +- name: load-balancer-rhel8-v7.0-pypy3.9-noauth-nossl + tasks: + - name: load-balancer-test + display_name: Load Balancer RHEL8 v7.0 pypy3.9 NoAuth NoSSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + VERSION: "7.0" + AUTH: noauth + SSL: nossl + test_loadbalancer: "true" + PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 +- name: load-balancer-rhel8-v8.0-pypy3.10-auth-ssl + tasks: + - name: load-balancer-test + display_name: Load Balancer RHEL8 v8.0 pypy3.10 Auth SSL + run_on: + - rhel87-small + 
batchtime: 10080 + expansions: + VERSION: "8.0" + AUTH: auth + SSL: ssl + test_loadbalancer: "true" + PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 +- name: load-balancer-rhel8-v8.0-py3.9-noauth-ssl + tasks: + - name: load-balancer-test + display_name: Load Balancer RHEL8 v8.0 py3.9 NoAuth SSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + VERSION: "8.0" + AUTH: noauth + SSL: ssl + test_loadbalancer: "true" + PYTHON_BINARY: /opt/python/3.9/bin/python3 +- name: load-balancer-rhel8-v8.0-py3.10-noauth-nossl + tasks: + - name: load-balancer-test + display_name: Load Balancer RHEL8 v8.0 py3.10 NoAuth NoSSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + VERSION: "8.0" + AUTH: noauth + SSL: nossl + test_loadbalancer: "true" + PYTHON_BINARY: /opt/python/3.10/bin/python3 +- name: load-balancer-rhel8-latest-py3.11-auth-ssl + tasks: + - name: load-balancer-test + display_name: Load Balancer RHEL8 latest py3.11 Auth SSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + VERSION: latest + AUTH: auth + SSL: ssl + test_loadbalancer: "true" + PYTHON_BINARY: /opt/python/3.11/bin/python3 +- name: load-balancer-rhel8-latest-py3.12-noauth-ssl + tasks: + - name: load-balancer-test + display_name: Load Balancer RHEL8 latest py3.12 NoAuth SSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + VERSION: latest + AUTH: noauth + SSL: ssl + test_loadbalancer: "true" + PYTHON_BINARY: /opt/python/3.12/bin/python3 +- name: load-balancer-rhel8-latest-py3.13-noauth-nossl + tasks: + - name: load-balancer-test + display_name: Load Balancer RHEL8 latest py3.13 NoAuth NoSSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + VERSION: latest + AUTH: noauth + SSL: nossl + test_loadbalancer: "true" + PYTHON_BINARY: /opt/python/3.13/bin/python3 +- name: load-balancer-rhel8-rapid-pypy3.9-auth-ssl + tasks: + - name: load-balancer-test + display_name: Load Balancer RHEL8 rapid pypy3.9 Auth SSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + VERSION: rapid + AUTH: auth + SSL: ssl + test_loadbalancer: "true" + PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 +- name: load-balancer-rhel8-rapid-pypy3.10-noauth-ssl + tasks: + - name: load-balancer-test + display_name: Load Balancer RHEL8 rapid pypy3.10 NoAuth SSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + VERSION: rapid + AUTH: noauth + SSL: ssl + test_loadbalancer: "true" + PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 +- name: load-balancer-rhel8-rapid-py3.9-noauth-nossl + tasks: + - name: load-balancer-test + display_name: Load Balancer RHEL8 rapid py3.9 NoAuth NoSSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + VERSION: rapid + AUTH: noauth + SSL: nossl + test_loadbalancer: "true" + PYTHON_BINARY: /opt/python/3.9/bin/python3 + - matrix_name: "oidc-auth-test" matrix_spec: platform: [ rhel8, macos, windows ] @@ -3643,17 +3830,6 @@ buildvariants: - name: "aws-auth-test-rapid" - name: "aws-auth-test-latest" -- matrix_name: "load-balancer" - matrix_spec: - platform: rhel8 - mongodb-version: ["6.0", "7.0", "8.0", "rapid", "latest"] - auth-ssl: "*" - python-version: "*" - loadbalancer: "*" - display_name: "Load Balancer ${platform} ${python-version} ${mongodb-version} ${auth-ssl}" - tasks: - - name: "load-balancer-test" - - name: testgcpkms-variant display_name: "GCP KMS" run_on: diff --git a/.evergreen/scripts/generate_config.py b/.evergreen/scripts/generate_config.py index dcd97b093e..03b900301c 100644 --- a/.evergreen/scripts/generate_config.py +++ 
b/.evergreen/scripts/generate_config.py @@ -112,12 +112,14 @@ def get_python_binary(python: str, host: str) -> str: def get_display_name(base: str, host: str, **kwargs) -> str: """Get the display name of a variant.""" display_name = f"{base} {HOSTS[host].display_name}" + version = kwargs.pop("VERSION", None) + if version: + if version not in ["rapid", "latest"]: + version = f"v{version}" + display_name = f"{display_name} {version}" for key, value in kwargs.items(): name = value - if key == "version": - if value not in ["rapid", "latest"]: - name = f"v{value}" - elif key == "python": + if key.lower() == "python": if not value.startswith("pypy"): name = f"py{value}" elif key.lower() in DISPLAY_LOOKUP: @@ -309,8 +311,34 @@ def get_encryption_expansions(encryption, ssl="ssl"): return variants +def create_load_balancer_variants(): + # Load balancer tests - run all supported versions for all combinations of auth and ssl and system python. + host = "rhel8" + task_names = ["load-balancer-test"] + batchtime = BATCHTIME_WEEK + expansions_base = dict(test_loadbalancer="true") + versions = ["6.0", "7.0", "8.0", "latest", "rapid"] + variants = [] + pythons = CPYTHONS + PYPYS + for ind, (version, (auth, ssl)) in enumerate(product(versions, AUTH_SSLS)): + expansions = dict(VERSION=version, AUTH=auth, SSL=ssl) + expansions.update(expansions_base) + python = pythons[ind % len(pythons)] + display_name = get_display_name("Load Balancer", host, python=python, **expansions) + variant = create_variant( + task_names, + display_name, + python=python, + host=host, + expansions=expansions, + batchtime=batchtime, + ) + variants.append(variant) + return variants + + ################## # Generate Config ################## -generate_yaml(variants=create_encryption_variants()) +generate_yaml(variants=create_load_balancer_variants()) From 317a539415a91a4057c9104a2192d05fb47af2e1 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Thu, 17 Oct 2024 15:01:24 -0500 Subject: [PATCH 048/182] PYTHON-4879 Use shrub.py for compressor tests (#1944) --- .evergreen/config.yml | 135 ++++++++++++++++---------- .evergreen/scripts/generate_config.py | 51 +++++++++- 2 files changed, 136 insertions(+), 50 deletions(-) diff --git a/.evergreen/config.yml b/.evergreen/config.yml index ae7c0a6590..a7efc223b9 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -2112,23 +2112,6 @@ axes: AUTH: "noauth" SSL: "nossl" - # Choice of wire protocol compression support - - id: compression - display_name: Compression - values: - - id: snappy - display_name: snappy compression - variables: - COMPRESSORS: "snappy" - - id: zlib - display_name: zlib compression - variables: - COMPRESSORS: "zlib" - - id: zstd - display_name: zstd compression - variables: - COMPRESSORS: "zstd" - # Choice of MongoDB server version - id: mongodb-version display_name: "MongoDB" @@ -3125,6 +3108,92 @@ buildvariants: PYTHON_BINARY: C:/python/Python313/python.exe tags: [encryption_tag] +# Compressor tests. 
+- name: snappy-compression-rhel8-py3.9-no-c + tasks: + - name: .standalone + display_name: snappy compression RHEL8 py3.9 No C + run_on: + - rhel87-small + expansions: + COMPRESSORS: snappy + NO_EXT: "1" + PYTHON_BINARY: /opt/python/3.9/bin/python3 +- name: snappy-compression-rhel8-py3.10 + tasks: + - name: .standalone + display_name: snappy compression RHEL8 py3.10 + run_on: + - rhel87-small + expansions: + COMPRESSORS: snappy + PYTHON_BINARY: /opt/python/3.10/bin/python3 +- name: zlib-compression-rhel8-py3.11-no-c + tasks: + - name: .standalone + display_name: zlib compression RHEL8 py3.11 No C + run_on: + - rhel87-small + expansions: + COMPRESSORS: zlib + NO_EXT: "1" + PYTHON_BINARY: /opt/python/3.11/bin/python3 +- name: zlib-compression-rhel8-py3.12 + tasks: + - name: .standalone + display_name: zlib compression RHEL8 py3.12 + run_on: + - rhel87-small + expansions: + COMPRESSORS: zlib + PYTHON_BINARY: /opt/python/3.12/bin/python3 +- name: zstd-compression-rhel8-py3.13-no-c + tasks: + - name: .standalone !.4.0 + display_name: zstd compression RHEL8 py3.13 No C + run_on: + - rhel87-small + expansions: + COMPRESSORS: zstd + NO_EXT: "1" + PYTHON_BINARY: /opt/python/3.13/bin/python3 +- name: zstd-compression-rhel8-py3.9 + tasks: + - name: .standalone !.4.0 + display_name: zstd compression RHEL8 py3.9 + run_on: + - rhel87-small + expansions: + COMPRESSORS: zstd + PYTHON_BINARY: /opt/python/3.9/bin/python3 +- name: snappy-compression-rhel8-pypy3.9 + tasks: + - name: .standalone + display_name: snappy compression RHEL8 pypy3.9 + run_on: + - rhel87-small + expansions: + COMPRESSORS: snappy + PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 +- name: zlib-compression-rhel8-pypy3.10 + tasks: + - name: .standalone + display_name: zlib compression RHEL8 pypy3.10 + run_on: + - rhel87-small + expansions: + COMPRESSORS: zlib + PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 +- name: zstd-compression-rhel8-pypy3.9 + tasks: + - name: .standalone !.4.0 + display_name: zstd compression RHEL8 pypy3.9 + run_on: + - rhel87-small + expansions: + COMPRESSORS: zstd + PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 + - matrix_name: "tests-fips" matrix_spec: platform: @@ -3214,38 +3283,6 @@ buildvariants: - ".4.2" - ".4.0" -- matrix_name: "tests-python-version-rhel8-compression" - matrix_spec: - platform: rhel8 - python-version: "*" - c-extensions: "*" - compression: "*" - exclude_spec: - # These interpreters are always tested without extensions. - - platform: rhel8 - python-version: ["pypy3.9", "pypy3.10"] - c-extensions: "with-c-extensions" - compression: "*" - display_name: "${compression} ${c-extensions} ${python-version} ${platform}" - tasks: - - "test-latest-standalone" - - "test-8.0-standalone" - - "test-7.0-standalone" - - "test-6.0-standalone" - - "test-5.0-standalone" - - "test-4.4-standalone" - - "test-4.2-standalone" - - "test-4.0-standalone" - rules: - # Server version 4.0 supports snappy and zlib but not zstd. 
- - if: - python-version: "*" - c-extensions: "*" - compression: ["zstd"] - then: - remove_tasks: - - "test-4.0-standalone" - - matrix_name: "tests-python-version-green-framework-rhel8" matrix_spec: platform: rhel8 diff --git a/.evergreen/scripts/generate_config.py b/.evergreen/scripts/generate_config.py index 03b900301c..91dedeb620 100644 --- a/.evergreen/scripts/generate_config.py +++ b/.evergreen/scripts/generate_config.py @@ -30,12 +30,14 @@ BATCHTIME_WEEK = 10080 AUTH_SSLS = [("auth", "ssl"), ("noauth", "ssl"), ("noauth", "nossl")] TOPOLOGIES = ["standalone", "replica_set", "sharded_cluster"] +C_EXTS = ["with_ext", "without_ext"] SYNCS = ["sync", "async"] DISPLAY_LOOKUP = dict( ssl=dict(ssl="SSL", nossl="NoSSL"), auth=dict(auth="Auth", noauth="NoAuth"), test_suites=dict(default="Sync", default_async="Async"), coverage=dict(coverage="cov"), + no_ext={"1": "No C"}, ) HOSTS = dict() @@ -137,6 +139,12 @@ def zip_cycle(*iterables, empty_default=None): yield tuple(next(i, empty_default) for i in cycles) +def handle_c_ext(c_ext, expansions): + """Handle c extension option.""" + if c_ext == C_EXTS[0]: + expansions["NO_EXT"] = "1" + + def generate_yaml(tasks=None, variants=None): """Generate the yaml for a given set of tasks and variants.""" project = EvgProject(tasks=tasks, buildvariants=variants) @@ -337,8 +345,49 @@ def create_load_balancer_variants(): return variants +def create_compression_variants(): + # Compression tests - standalone versions of each server, across python versions, with and without c extensions. + # PyPy interpreters are always tested without extensions. + host = "rhel8" + task_names = dict(snappy=[".standalone"], zlib=[".standalone"], zstd=[".standalone !.4.0"]) + variants = [] + for ind, (compressor, c_ext) in enumerate(product(["snappy", "zlib", "zstd"], C_EXTS)): + expansions = dict(COMPRESSORS=compressor) + handle_c_ext(c_ext, expansions) + base_name = f"{compressor} compression" + python = CPYTHONS[ind % len(CPYTHONS)] + display_name = get_display_name(base_name, host, python=python, **expansions) + variant = create_variant( + task_names[compressor], + display_name, + python=python, + host=host, + expansions=expansions, + ) + variants.append(variant) + + other_pythons = PYPYS + CPYTHONS[ind:] + for compressor, python in zip_cycle(["snappy", "zlib", "zstd"], other_pythons): + expansions = dict(COMPRESSORS=compressor) + handle_c_ext(c_ext, expansions) + base_name = f"{compressor} compression" + display_name = get_display_name(base_name, host, python=python, **expansions) + variant = create_variant( + task_names[compressor], + display_name, + python=python, + host=host, + expansions=expansions, + ) + variants.append(variant) + + return variants + + ################## # Generate Config ################## -generate_yaml(variants=create_load_balancer_variants()) +variants = create_compression_variants() +# print(len(variants)) +generate_yaml(variants=variants) From 7e904b3c31d3242ef9e202438e50cbecf611af75 Mon Sep 17 00:00:00 2001 From: Shane Harvey Date: Thu, 17 Oct 2024 13:11:20 -0700 Subject: [PATCH 049/182] PYTHON-4874 Fix async Windows KMS support (#1942) --- pymongo/network_layer.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pymongo/network_layer.py b/pymongo/network_layer.py index 7a325853c8..aa16e85a07 100644 --- a/pymongo/network_layer.py +++ b/pymongo/network_layer.py @@ -215,11 +215,11 @@ async def _async_receive_ssl( while total_read < length: try: read = conn.recv_into(mv[total_read:]) + if read == 0: + raise OSError("connection 
closed") # KMS responses update their expected size after the first batch, stop reading after one loop if once: return mv[:read] - if read == 0: - raise OSError("connection closed") except BLOCKING_IO_ERRORS: await asyncio.sleep(backoff) read = 0 From 335b728f070a350239123a755856a1a3d1d51746 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 17 Oct 2024 20:27:27 -0500 Subject: [PATCH 050/182] Bump pyright from 1.1.383 to 1.1.384 (#1922) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Jib --- requirements/typing.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/typing.txt b/requirements/typing.txt index 06c33c6db6..2c23212da7 100644 --- a/requirements/typing.txt +++ b/requirements/typing.txt @@ -1,5 +1,5 @@ mypy==1.11.2 -pyright==1.1.383 +pyright==1.1.384 typing_extensions -r ./encryption.txt -r ./ocsp.txt From 021a9f75243b466d2a333c991ad4d9eb52d8275e Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Fri, 18 Oct 2024 08:57:20 -0500 Subject: [PATCH 051/182] PYTHON-4882 Use shrub.py for enterprise auth tests (#1945) --- .evergreen/config.yml | 83 +++++++++++++++++++++------ .evergreen/run-tests.sh | 2 + .evergreen/scripts/generate_config.py | 23 +++++++- pyproject.toml | 1 + test/asynchronous/test_auth.py | 4 ++ test/test_auth.py | 4 ++ 6 files changed, 98 insertions(+), 19 deletions(-) diff --git a/.evergreen/config.yml b/.evergreen/config.yml index a7efc223b9..9e2ab77088 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -3194,6 +3194,71 @@ buildvariants: COMPRESSORS: zstd PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 +# Enterprise auth tests. +- name: enterprise-auth-macos-py3.9-auth + tasks: + - name: test-enterprise-auth + display_name: Enterprise Auth macOS py3.9 Auth + run_on: + - macos-14 + expansions: + AUTH: auth + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 +- name: enterprise-auth-rhel8-py3.10-auth + tasks: + - name: test-enterprise-auth + display_name: Enterprise Auth RHEL8 py3.10 Auth + run_on: + - rhel87-small + expansions: + AUTH: auth + PYTHON_BINARY: /opt/python/3.10/bin/python3 +- name: enterprise-auth-rhel8-py3.11-auth + tasks: + - name: test-enterprise-auth + display_name: Enterprise Auth RHEL8 py3.11 Auth + run_on: + - rhel87-small + expansions: + AUTH: auth + PYTHON_BINARY: /opt/python/3.11/bin/python3 +- name: enterprise-auth-rhel8-py3.12-auth + tasks: + - name: test-enterprise-auth + display_name: Enterprise Auth RHEL8 py3.12 Auth + run_on: + - rhel87-small + expansions: + AUTH: auth + PYTHON_BINARY: /opt/python/3.12/bin/python3 +- name: enterprise-auth-win64-py3.13-auth + tasks: + - name: test-enterprise-auth + display_name: Enterprise Auth Win64 py3.13 Auth + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: auth + PYTHON_BINARY: C:/python/Python313/python.exe +- name: enterprise-auth-rhel8-pypy3.9-auth + tasks: + - name: test-enterprise-auth + display_name: Enterprise Auth RHEL8 pypy3.9 Auth + run_on: + - rhel87-small + expansions: + AUTH: auth + PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 +- name: enterprise-auth-rhel8-pypy3.10-auth + tasks: + - name: test-enterprise-auth + display_name: Enterprise Auth RHEL8 pypy3.10 Auth + run_on: + - rhel87-small + expansions: + AUTH: auth + PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 + - matrix_name: "tests-fips" matrix_spec: platform: @@ -3350,24 +3415,6 @@ buildvariants: tasks: - 
".latest" -- matrix_name: "test-linux-enterprise-auth" - matrix_spec: - platform: rhel8 - python-version: "*" - auth: "auth" - display_name: "Enterprise ${auth} ${platform} ${python-version}" - tasks: - - name: "test-enterprise-auth" - -- matrix_name: "tests-windows-enterprise-auth" - matrix_spec: - platform: windows - python-version-windows: "*" - auth: "auth" - display_name: "Enterprise ${auth} ${platform} ${python-version-windows}" - tasks: - - name: "test-enterprise-auth" - - matrix_name: "test-search-index-helpers" matrix_spec: platform: rhel8 diff --git a/.evergreen/run-tests.sh b/.evergreen/run-tests.sh index 364570999f..36fa76e317 100755 --- a/.evergreen/run-tests.sh +++ b/.evergreen/run-tests.sh @@ -90,6 +90,8 @@ if [ -n "$TEST_ENTERPRISE_AUTH" ]; then export GSSAPI_HOST=${SASL_HOST} export GSSAPI_PORT=${SASL_PORT} export GSSAPI_PRINCIPAL=${PRINCIPAL} + + export TEST_SUITES="auth" fi if [ -n "$TEST_LOADBALANCER" ]; then diff --git a/.evergreen/scripts/generate_config.py b/.evergreen/scripts/generate_config.py index 91dedeb620..c3bfeef7af 100644 --- a/.evergreen/scripts/generate_config.py +++ b/.evergreen/scripts/generate_config.py @@ -384,10 +384,31 @@ def create_compression_variants(): return variants +def create_enterprise_auth_variants(): + expansions = dict(AUTH="auth") + variants = [] + + # All python versions across platforms. + for python in ALL_PYTHONS: + if python == CPYTHONS[0]: + host = "macos" + elif python == CPYTHONS[-1]: + host = "win64" + else: + host = "rhel8" + display_name = get_display_name("Enterprise Auth", host, python=python, **expansions) + variant = create_variant( + ["test-enterprise-auth"], display_name, host=host, python=python, expansions=expansions + ) + variants.append(variant) + + return variants + + ################## # Generate Config ################## -variants = create_compression_variants() +variants = create_enterprise_auth_variants() # print(len(variants)) generate_yaml(variants=variants) diff --git a/pyproject.toml b/pyproject.toml index b4f59f67d5..9a29a777fc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -99,6 +99,7 @@ filterwarnings = [ markers = [ "auth_aws: tests that rely on pymongo-auth-aws", "auth_oidc: tests that rely on oidc auth", + "auth: tests that rely on authentication", "ocsp: tests that rely on ocsp", "atlas: tests that rely on atlas", "data_lake: tests that rely on atlas data lake", diff --git a/test/asynchronous/test_auth.py b/test/asynchronous/test_auth.py index fbaca41f09..9262714374 100644 --- a/test/asynchronous/test_auth.py +++ b/test/asynchronous/test_auth.py @@ -32,6 +32,8 @@ ) from test.utils import AllowListEventListener, delay, ignore_deprecations +import pytest + from pymongo import AsyncMongoClient, monitoring from pymongo.asynchronous.auth import HAVE_KERBEROS from pymongo.auth_shared import _build_credentials_tuple @@ -42,6 +44,8 @@ _IS_SYNC = False +pytestmark = pytest.mark.auth + # YOU MUST RUN KINIT BEFORE RUNNING GSSAPI TESTS ON UNIX. 
GSSAPI_HOST = os.environ.get("GSSAPI_HOST") GSSAPI_PORT = int(os.environ.get("GSSAPI_PORT", "27017")) diff --git a/test/test_auth.py b/test/test_auth.py index b311d330bc..310006afff 100644 --- a/test/test_auth.py +++ b/test/test_auth.py @@ -32,6 +32,8 @@ ) from test.utils import AllowListEventListener, delay, ignore_deprecations +import pytest + from pymongo import MongoClient, monitoring from pymongo.auth_shared import _build_credentials_tuple from pymongo.errors import OperationFailure @@ -42,6 +44,8 @@ _IS_SYNC = True +pytestmark = pytest.mark.auth + # YOU MUST RUN KINIT BEFORE RUNNING GSSAPI TESTS ON UNIX. GSSAPI_HOST = os.environ.get("GSSAPI_HOST") GSSAPI_PORT = int(os.environ.get("GSSAPI_PORT", "27017")) From 6a7e83dc95319c445b95ab1f54f4aa8435986cc4 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Fri, 18 Oct 2024 10:36:05 -0500 Subject: [PATCH 052/182] PYTHON-4887 Do not test macos arm64 on server versions < 6.0 (#1947) --- .evergreen/config.yml | 48 ++++++++++++++++++++------- .evergreen/scripts/generate_config.py | 9 +++-- 2 files changed, 43 insertions(+), 14 deletions(-) diff --git a/.evergreen/config.yml b/.evergreen/config.yml index 9e2ab77088..705880be60 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -2592,7 +2592,11 @@ buildvariants: # Server tests for macOS Arm64. - name: test-macos-arm64-py3.9-auth-ssl-sync tasks: - - name: .standalone + - name: .standalone .6.0 + - name: .standalone .7.0 + - name: .standalone .8.0 + - name: .standalone .rapid + - name: .standalone .latest display_name: Test macOS Arm64 py3.9 Auth SSL Sync run_on: - macos-14-arm64 @@ -2600,11 +2604,15 @@ buildvariants: AUTH: auth SSL: ssl TEST_SUITES: default - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 - name: test-macos-arm64-py3.9-auth-ssl-async tasks: - - name: .standalone + - name: .standalone .6.0 + - name: .standalone .7.0 + - name: .standalone .8.0 + - name: .standalone .rapid + - name: .standalone .latest display_name: Test macOS Arm64 py3.9 Auth SSL Async run_on: - macos-14-arm64 @@ -2612,11 +2620,15 @@ buildvariants: AUTH: auth SSL: ssl TEST_SUITES: default_async - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 - name: test-macos-arm64-py3.13-noauth-ssl-sync tasks: - - name: .replica_set + - name: .replica_set .6.0 + - name: .replica_set .7.0 + - name: .replica_set .8.0 + - name: .replica_set .rapid + - name: .replica_set .latest display_name: Test macOS Arm64 py3.13 NoAuth SSL Sync run_on: - macos-14-arm64 @@ -2624,11 +2636,15 @@ buildvariants: AUTH: noauth SSL: ssl TEST_SUITES: default - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 - name: test-macos-arm64-py3.13-noauth-ssl-async tasks: - - name: .replica_set + - name: .replica_set .6.0 + - name: .replica_set .7.0 + - name: .replica_set .8.0 + - name: .replica_set .rapid + - name: .replica_set .latest display_name: Test macOS Arm64 py3.13 NoAuth SSL Async run_on: - macos-14-arm64 @@ -2636,11 +2652,15 @@ buildvariants: AUTH: noauth SSL: ssl TEST_SUITES: default_async - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: 
/Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 - name: test-macos-arm64-py3.9-noauth-nossl-sync tasks: - - name: .sharded_cluster + - name: .sharded_cluster .6.0 + - name: .sharded_cluster .7.0 + - name: .sharded_cluster .8.0 + - name: .sharded_cluster .rapid + - name: .sharded_cluster .latest display_name: Test macOS Arm64 py3.9 NoAuth NoSSL Sync run_on: - macos-14-arm64 @@ -2648,11 +2668,15 @@ buildvariants: AUTH: noauth SSL: nossl TEST_SUITES: default - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 - name: test-macos-arm64-py3.9-noauth-nossl-async tasks: - - name: .sharded_cluster + - name: .sharded_cluster .6.0 + - name: .sharded_cluster .7.0 + - name: .sharded_cluster .8.0 + - name: .sharded_cluster .rapid + - name: .sharded_cluster .latest display_name: Test macOS Arm64 py3.9 NoAuth NoSSL Async run_on: - macos-14-arm64 @@ -2660,8 +2684,8 @@ buildvariants: AUTH: noauth SSL: nossl TEST_SUITES: default_async - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 # Server tests for Windows. - name: test-win64-py3.9-auth-ssl-sync diff --git a/.evergreen/scripts/generate_config.py b/.evergreen/scripts/generate_config.py index c3bfeef7af..a3ec798c3b 100644 --- a/.evergreen/scripts/generate_config.py +++ b/.evergreen/scripts/generate_config.py @@ -23,6 +23,7 @@ ############## ALL_VERSIONS = ["4.0", "4.4", "5.0", "6.0", "7.0", "8.0", "rapid", "latest"] +VERSIONS_6_0_PLUS = ["6.0", "7.0", "8.0", "rapid", "latest"] CPYTHONS = ["3.9", "3.10", "3.11", "3.12", "3.13"] PYPYS = ["pypy3.9", "pypy3.10"] ALL_PYTHONS = CPYTHONS + PYPYS @@ -239,10 +240,14 @@ def create_server_variants() -> list[BuildVariant]: zip_cycle(MIN_MAX_PYTHON, AUTH_SSLS, TOPOLOGIES), SYNCS ): test_suite = "default" if sync == "sync" else "default_async" + tasks = [f".{topology}"] + # MacOS arm64 only works on server versions 6.0+ + if host == "macos-arm64": + tasks = [f".{topology} .{version}" for version in VERSIONS_6_0_PLUS] expansions = dict(AUTH=auth, SSL=ssl, TEST_SUITES=test_suite, SKIP_CSOT_TESTS="true") display_name = get_display_name("Test", host, python=python, **expansions) variant = create_variant( - [f".{topology}"], + tasks, display_name, python=python, host=host, @@ -409,6 +414,6 @@ def create_enterprise_auth_variants(): # Generate Config ################## -variants = create_enterprise_auth_variants() +variants = create_server_variants() # print(len(variants)) generate_yaml(variants=variants) From 1ae0c3904c9a71bed1f7d8f81183b59070845a81 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Fri, 18 Oct 2024 10:58:28 -0500 Subject: [PATCH 053/182] PYTHON-4886 Use shrub.py for PyOpenSSL tests (#1946) --- .evergreen/config.yml | 144 +++++++++++++++++--------- .evergreen/scripts/generate_config.py | 34 +++++- 2 files changed, 126 insertions(+), 52 deletions(-) diff --git a/.evergreen/config.yml b/.evergreen/config.yml index 705880be60..9083da145b 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -2305,16 +2305,6 @@ axes: variables: COVERAGE: "coverage" - # Run pyopenssl tests? 
- - id: pyopenssl - display_name: "PyOpenSSL" - values: - - id: "enabled" - display_name: "PyOpenSSL" - variables: - test_pyopenssl: true - batchtime: 10080 # 7 days - - id: versionedApi display_name: "versionedApi" values: @@ -3283,6 +3273,99 @@ buildvariants: AUTH: auth PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 +# PyOpenSSL tests. +- name: pyopenssl-macos-py3.9 + tasks: + - name: .replica_set + - name: .7.0 + display_name: PyOpenSSL macOS py3.9 + run_on: + - macos-14 + batchtime: 10080 + expansions: + AUTH: noauth + test_pyopenssl: "true" + SSL: ssl + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 +- name: pyopenssl-rhel8-py3.10 + tasks: + - name: .replica_set + - name: .7.0 + display_name: PyOpenSSL RHEL8 py3.10 + run_on: + - rhel87-small + batchtime: 10080 + expansions: + AUTH: auth + test_pyopenssl: "true" + SSL: ssl + PYTHON_BINARY: /opt/python/3.10/bin/python3 +- name: pyopenssl-rhel8-py3.11 + tasks: + - name: .replica_set + - name: .7.0 + display_name: PyOpenSSL RHEL8 py3.11 + run_on: + - rhel87-small + batchtime: 10080 + expansions: + AUTH: auth + test_pyopenssl: "true" + SSL: ssl + PYTHON_BINARY: /opt/python/3.11/bin/python3 +- name: pyopenssl-rhel8-py3.12 + tasks: + - name: .replica_set + - name: .7.0 + display_name: PyOpenSSL RHEL8 py3.12 + run_on: + - rhel87-small + batchtime: 10080 + expansions: + AUTH: auth + test_pyopenssl: "true" + SSL: ssl + PYTHON_BINARY: /opt/python/3.12/bin/python3 +- name: pyopenssl-win64-py3.13 + tasks: + - name: .replica_set + - name: .7.0 + display_name: PyOpenSSL Win64 py3.13 + run_on: + - windows-64-vsMulti-small + batchtime: 10080 + expansions: + AUTH: auth + test_pyopenssl: "true" + SSL: ssl + PYTHON_BINARY: C:/python/Python313/python.exe +- name: pyopenssl-rhel8-pypy3.9 + tasks: + - name: .replica_set + - name: .7.0 + display_name: PyOpenSSL RHEL8 pypy3.9 + run_on: + - rhel87-small + batchtime: 10080 + expansions: + AUTH: auth + test_pyopenssl: "true" + SSL: ssl + PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 +- name: pyopenssl-rhel8-pypy3.10 + tasks: + - name: .replica_set + - name: .7.0 + display_name: PyOpenSSL RHEL8 pypy3.10 + run_on: + - rhel87-small + batchtime: 10080 + expansions: + AUTH: auth + test_pyopenssl: "true" + SSL: ssl + PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 + - matrix_name: "tests-fips" matrix_spec: platform: @@ -3305,47 +3388,6 @@ buildvariants: tasks: - ".6.0" -- matrix_name: "tests-pyopenssl" - matrix_spec: - platform: rhel8 - python-version: "*" - auth: "*" - ssl: "ssl" - pyopenssl: "*" - # Only test "noauth" with Python 3.9. - exclude_spec: - platform: rhel8 - python-version: ["3.10", "3.11", "3.12", "3.13", "pypy3.9", "pypy3.10"] - auth: "noauth" - ssl: "ssl" - pyopenssl: "*" - display_name: "PyOpenSSL ${platform} ${python-version} ${auth}" - tasks: - - '.replica_set' - # Test standalone and sharded only on 7.0. 
- - '.7.0' - -- matrix_name: "tests-pyopenssl-macOS" - matrix_spec: - platform: macos - auth: "auth" - ssl: "ssl" - pyopenssl: "*" - display_name: "PyOpenSSL ${platform} ${auth}" - tasks: - - '.replica_set' - -- matrix_name: "tests-pyopenssl-windows" - matrix_spec: - platform: windows - python-version-windows: "*" - auth: "auth" - ssl: "ssl" - pyopenssl: "*" - display_name: "PyOpenSSL ${platform} ${python-version-windows} ${auth}" - tasks: - - '.replica_set' - - matrix_name: "tests-python-version-rhel8-without-c-extensions" matrix_spec: platform: rhel8 diff --git a/.evergreen/scripts/generate_config.py b/.evergreen/scripts/generate_config.py index a3ec798c3b..6d614a9afe 100644 --- a/.evergreen/scripts/generate_config.py +++ b/.evergreen/scripts/generate_config.py @@ -410,10 +410,42 @@ def create_enterprise_auth_variants(): return variants +def create_pyopenssl_variants(): + base_name = "PyOpenSSL" + batchtime = BATCHTIME_WEEK + base_expansions = dict(test_pyopenssl="true", SSL="ssl") + variants = [] + + for python in ALL_PYTHONS: + # Only test "noauth" with min python. + auth = "noauth" if python == CPYTHONS[0] else "auth" + if python == CPYTHONS[0]: + host = "macos" + elif python == CPYTHONS[-1]: + host = "win64" + else: + host = "rhel8" + expansions = dict(AUTH=auth) + expansions.update(base_expansions) + + display_name = get_display_name(base_name, host, python=python) + variant = create_variant( + [".replica_set", ".7.0"], + display_name, + python=python, + host=host, + expansions=expansions, + batchtime=batchtime, + ) + variants.append(variant) + + return variants + + ################## # Generate Config ################## -variants = create_server_variants() +variants = create_pyopenssl_variants() # print(len(variants)) generate_yaml(variants=variants) From a1ade45dd3b67a6e4baa50404b9807f68062f043 Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Fri, 18 Oct 2024 13:32:09 -0400 Subject: [PATCH 054/182] PYTHON-4881 - Use OvertCommandListener wherever sensitive events are not needed (#1943) Co-authored-by: Steven Silvester --- test/asynchronous/test_change_stream.py | 5 +++-- test/asynchronous/test_collation.py | 4 ++-- test/asynchronous/test_collection.py | 3 ++- test/asynchronous/test_cursor.py | 4 ++-- test/asynchronous/test_grid_file.py | 4 ++-- test/asynchronous/test_monitoring.py | 21 ++++++++++++--------- test/asynchronous/test_session.py | 3 ++- test/auth_oidc/test_auth_oidc.py | 6 +++--- test/test_change_stream.py | 5 +++-- test/test_collation.py | 4 ++-- test/test_collection.py | 3 ++- test/test_cursor.py | 4 ++-- test/test_grid_file.py | 4 ++-- test/test_index_management.py | 4 ++-- test/test_monitoring.py | 21 ++++++++++++--------- test/test_read_write_concern_spec.py | 6 +++--- test/test_server_selection.py | 3 ++- test/test_session.py | 3 ++- test/test_ssl.py | 1 + 19 files changed, 61 insertions(+), 47 deletions(-) diff --git a/test/asynchronous/test_change_stream.py b/test/asynchronous/test_change_stream.py index 883ed72c4c..98641f46ee 100644 --- a/test/asynchronous/test_change_stream.py +++ b/test/asynchronous/test_change_stream.py @@ -39,6 +39,7 @@ from test.utils import ( AllowListEventListener, EventListener, + OvertCommandListener, async_wait_until, ) @@ -179,7 +180,7 @@ async def _wait_until(): @no_type_check async def test_try_next_runs_one_getmore(self): - listener = EventListener() + listener = OvertCommandListener() client = await self.async_rs_or_single_client(event_listeners=[listener]) # Connect to the cluster. 
await client.admin.command("ping") @@ -237,7 +238,7 @@ async def _wait_until(): @no_type_check async def test_batch_size_is_honored(self): - listener = EventListener() + listener = OvertCommandListener() client = await self.async_rs_or_single_client(event_listeners=[listener]) # Connect to the cluster. await client.admin.command("ping") diff --git a/test/asynchronous/test_collation.py b/test/asynchronous/test_collation.py index be3ea22e42..d95f4c9917 100644 --- a/test/asynchronous/test_collation.py +++ b/test/asynchronous/test_collation.py @@ -18,7 +18,7 @@ import functools import warnings from test.asynchronous import AsyncIntegrationTest, async_client_context, unittest -from test.utils import EventListener +from test.utils import EventListener, OvertCommandListener from typing import Any from pymongo.asynchronous.helpers import anext @@ -101,7 +101,7 @@ class TestCollation(AsyncIntegrationTest): @async_client_context.require_connection async def _setup_class(cls): await super()._setup_class() - cls.listener = EventListener() + cls.listener = OvertCommandListener() cls.client = await cls.unmanaged_async_rs_or_single_client(event_listeners=[cls.listener]) cls.db = cls.client.pymongo_test cls.collation = Collation("en_US") diff --git a/test/asynchronous/test_collection.py b/test/asynchronous/test_collection.py index 612090b69f..db52bad4ac 100644 --- a/test/asynchronous/test_collection.py +++ b/test/asynchronous/test_collection.py @@ -36,6 +36,7 @@ from test.utils import ( IMPOSSIBLE_WRITE_CONCERN, EventListener, + OvertCommandListener, async_get_pool, async_is_mongos, async_wait_until, @@ -2116,7 +2117,7 @@ async def test_find_one_and(self): self.assertEqual(4, (await c.find_one_and_update({}, {"$inc": {"i": 1}}, sort=sort))["j"]) async def test_find_one_and_write_concern(self): - listener = EventListener() + listener = OvertCommandListener() db = (await self.async_single_client(event_listeners=[listener]))[self.db.name] # non-default WriteConcern. c_w0 = db.get_collection("test", write_concern=WriteConcern(w=0)) diff --git a/test/asynchronous/test_cursor.py b/test/asynchronous/test_cursor.py index ee0a757ed3..787da3d957 100644 --- a/test/asynchronous/test_cursor.py +++ b/test/asynchronous/test_cursor.py @@ -1601,7 +1601,7 @@ async def test_read_concern(self): await anext(c.find_raw_batches()) async def test_monitoring(self): - listener = EventListener() + listener = OvertCommandListener() client = await self.async_rs_or_single_client(event_listeners=[listener]) c = client.pymongo_test.test await c.drop() @@ -1768,7 +1768,7 @@ async def test_collation(self): await anext(await self.db.test.aggregate_raw_batches([], collation=Collation("en_US"))) async def test_monitoring(self): - listener = EventListener() + listener = OvertCommandListener() client = await self.async_rs_or_single_client(event_listeners=[listener]) c = client.pymongo_test.test await c.drop() diff --git a/test/asynchronous/test_grid_file.py b/test/asynchronous/test_grid_file.py index 9c57c15c5a..54fcd3abf6 100644 --- a/test/asynchronous/test_grid_file.py +++ b/test/asynchronous/test_grid_file.py @@ -33,7 +33,7 @@ sys.path[0:0] = [""] -from test.utils import EventListener +from test.utils import OvertCommandListener from bson.objectid import ObjectId from gridfs.asynchronous.grid_file import ( @@ -810,7 +810,7 @@ async def test_survive_cursor_not_found(self): # Use 102 batches to cause a single getMore. 
chunk_size = 1024 data = b"d" * (102 * chunk_size) - listener = EventListener() + listener = OvertCommandListener() client = await self.async_rs_or_single_client(event_listeners=[listener]) db = client.pymongo_test async with AsyncGridIn(db.fs, chunk_size=chunk_size) as infile: diff --git a/test/asynchronous/test_monitoring.py b/test/asynchronous/test_monitoring.py index b5d8708dc3..b0c86ab54e 100644 --- a/test/asynchronous/test_monitoring.py +++ b/test/asynchronous/test_monitoring.py @@ -31,6 +31,7 @@ ) from test.utils import ( EventListener, + OvertCommandListener, async_wait_until, ) @@ -54,7 +55,7 @@ class AsyncTestCommandMonitoring(AsyncIntegrationTest): @async_client_context.require_connection async def _setup_class(cls): await super()._setup_class() - cls.listener = EventListener() + cls.listener = OvertCommandListener() cls.client = await cls.unmanaged_async_rs_or_single_client( event_listeners=[cls.listener], retryWrites=False ) @@ -1100,11 +1101,13 @@ async def test_first_batch_helper(self): @async_client_context.require_version_max(6, 1, 99) async def test_sensitive_commands(self): - listeners = self.client._event_listeners + listener = EventListener() + client = await self.async_rs_or_single_client(event_listeners=[listener]) + listeners = client._event_listeners - self.listener.reset() + listener.reset() cmd = SON([("getnonce", 1)]) - listeners.publish_command_start(cmd, "pymongo_test", 12345, await self.client.address, None) # type: ignore[arg-type] + listeners.publish_command_start(cmd, "pymongo_test", 12345, await client.address, None) # type: ignore[arg-type] delta = datetime.timedelta(milliseconds=100) listeners.publish_command_success( delta, @@ -1115,15 +1118,15 @@ async def test_sensitive_commands(self): None, database_name="pymongo_test", ) - started = self.listener.started_events[0] - succeeded = self.listener.succeeded_events[0] - self.assertEqual(0, len(self.listener.failed_events)) + started = listener.started_events[0] + succeeded = listener.succeeded_events[0] + self.assertEqual(0, len(listener.failed_events)) self.assertIsInstance(started, monitoring.CommandStartedEvent) self.assertEqual({}, started.command) self.assertEqual("pymongo_test", started.database_name) self.assertEqual("getnonce", started.command_name) self.assertIsInstance(started.request_id, int) - self.assertEqual(await self.client.address, started.connection_id) + self.assertEqual(await client.address, started.connection_id) self.assertIsInstance(succeeded, monitoring.CommandSucceededEvent) self.assertEqual(succeeded.duration_micros, 100000) self.assertEqual(started.command_name, succeeded.command_name) @@ -1140,7 +1143,7 @@ class AsyncTestGlobalListener(AsyncIntegrationTest): @async_client_context.require_connection async def _setup_class(cls): await super()._setup_class() - cls.listener = EventListener() + cls.listener = OvertCommandListener() # We plan to call register(), which internally modifies _LISTENERS. 
cls.saved_listeners = copy.deepcopy(monitoring._LISTENERS) monitoring.register(cls.listener) diff --git a/test/asynchronous/test_session.py b/test/asynchronous/test_session.py index d264b5ecb0..b432621798 100644 --- a/test/asynchronous/test_session.py +++ b/test/asynchronous/test_session.py @@ -36,6 +36,7 @@ from test.utils import ( EventListener, ExceptionCatchingThread, + OvertCommandListener, async_wait_until, wait_until, ) @@ -199,7 +200,7 @@ def test_implicit_sessions_checkout(self): lsid_set = set() failures = 0 for _ in range(5): - listener = EventListener() + listener = OvertCommandListener() client = self.async_rs_or_single_client(event_listeners=[listener], maxPoolSize=1) cursor = client.db.test.find({}) ops: List[Tuple[Callable, List[Any]]] = [ diff --git a/test/auth_oidc/test_auth_oidc.py b/test/auth_oidc/test_auth_oidc.py index 6d31f3db4e..6526391daf 100644 --- a/test/auth_oidc/test_auth_oidc.py +++ b/test/auth_oidc/test_auth_oidc.py @@ -31,7 +31,7 @@ sys.path[0:0] = [""] from test.unified_format import generate_test_classes -from test.utils import EventListener +from test.utils import EventListener, OvertCommandListener from bson import SON from pymongo import MongoClient @@ -348,7 +348,7 @@ def test_4_1_reauthenticate_succeeds(self): # Create a default OIDC client and add an event listener. # The following assumes that the driver does not emit saslStart or saslContinue events. # If the driver does emit those events, ignore/filter them for the purposes of this test. - listener = EventListener() + listener = OvertCommandListener() client = self.create_client(event_listeners=[listener]) # Perform a find operation that succeeds. @@ -1021,7 +1021,7 @@ def fetch(self, _): def test_4_4_speculative_authentication_should_be_ignored_on_reauthentication(self): # Create an OIDC configured client that can listen for `SaslStart` commands. - listener = EventListener() + listener = OvertCommandListener() client = self.create_client(event_listeners=[listener]) # Preload the *Client Cache* with a valid access token to enforce Speculative Authentication. diff --git a/test/test_change_stream.py b/test/test_change_stream.py index dae224c5e0..3a107122b7 100644 --- a/test/test_change_stream.py +++ b/test/test_change_stream.py @@ -39,6 +39,7 @@ from test.utils import ( AllowListEventListener, EventListener, + OvertCommandListener, wait_until, ) @@ -177,7 +178,7 @@ def _wait_until(): @no_type_check def test_try_next_runs_one_getmore(self): - listener = EventListener() + listener = OvertCommandListener() client = self.rs_or_single_client(event_listeners=[listener]) # Connect to the cluster. client.admin.command("ping") @@ -235,7 +236,7 @@ def _wait_until(): @no_type_check def test_batch_size_is_honored(self): - listener = EventListener() + listener = OvertCommandListener() client = self.rs_or_single_client(event_listeners=[listener]) # Connect to the cluster. 
client.admin.command("ping") diff --git a/test/test_collation.py b/test/test_collation.py index e5c1c7eb11..b878df2fb4 100644 --- a/test/test_collation.py +++ b/test/test_collation.py @@ -18,7 +18,7 @@ import functools import warnings from test import IntegrationTest, client_context, unittest -from test.utils import EventListener +from test.utils import EventListener, OvertCommandListener from typing import Any from pymongo.collation import ( @@ -101,7 +101,7 @@ class TestCollation(IntegrationTest): @client_context.require_connection def _setup_class(cls): super()._setup_class() - cls.listener = EventListener() + cls.listener = OvertCommandListener() cls.client = cls.unmanaged_rs_or_single_client(event_listeners=[cls.listener]) cls.db = cls.client.pymongo_test cls.collation = Collation("en_US") diff --git a/test/test_collection.py b/test/test_collection.py index a2c3b0b0b6..84a900d45b 100644 --- a/test/test_collection.py +++ b/test/test_collection.py @@ -36,6 +36,7 @@ from test.utils import ( IMPOSSIBLE_WRITE_CONCERN, EventListener, + OvertCommandListener, get_pool, is_mongos, wait_until, @@ -2093,7 +2094,7 @@ def test_find_one_and(self): self.assertEqual(4, (c.find_one_and_update({}, {"$inc": {"i": 1}}, sort=sort))["j"]) def test_find_one_and_write_concern(self): - listener = EventListener() + listener = OvertCommandListener() db = (self.single_client(event_listeners=[listener]))[self.db.name] # non-default WriteConcern. c_w0 = db.get_collection("test", write_concern=WriteConcern(w=0)) diff --git a/test/test_cursor.py b/test/test_cursor.py index 7a6dfc9429..9eac0f1c49 100644 --- a/test/test_cursor.py +++ b/test/test_cursor.py @@ -1590,7 +1590,7 @@ def test_read_concern(self): next(c.find_raw_batches()) def test_monitoring(self): - listener = EventListener() + listener = OvertCommandListener() client = self.rs_or_single_client(event_listeners=[listener]) c = client.pymongo_test.test c.drop() @@ -1757,7 +1757,7 @@ def test_collation(self): next(self.db.test.aggregate_raw_batches([], collation=Collation("en_US"))) def test_monitoring(self): - listener = EventListener() + listener = OvertCommandListener() client = self.rs_or_single_client(event_listeners=[listener]) c = client.pymongo_test.test c.drop() diff --git a/test/test_grid_file.py b/test/test_grid_file.py index fe88aec5ff..c35efccef5 100644 --- a/test/test_grid_file.py +++ b/test/test_grid_file.py @@ -33,7 +33,7 @@ sys.path[0:0] = [""] -from test.utils import EventListener +from test.utils import OvertCommandListener from bson.objectid import ObjectId from gridfs.errors import NoFile @@ -808,7 +808,7 @@ def test_survive_cursor_not_found(self): # Use 102 batches to cause a single getMore. 
chunk_size = 1024 data = b"d" * (102 * chunk_size) - listener = EventListener() + listener = OvertCommandListener() client = self.rs_or_single_client(event_listeners=[listener]) db = client.pymongo_test with GridIn(db.fs, chunk_size=chunk_size) as infile: diff --git a/test/test_index_management.py b/test/test_index_management.py index ec1e363737..6ca726e2e0 100644 --- a/test/test_index_management.py +++ b/test/test_index_management.py @@ -27,7 +27,7 @@ from test import IntegrationTest, PyMongoTestCase, unittest from test.unified_format import generate_test_classes -from test.utils import AllowListEventListener, EventListener +from test.utils import AllowListEventListener, EventListener, OvertCommandListener from pymongo.errors import OperationFailure from pymongo.operations import SearchIndexModel @@ -88,7 +88,7 @@ def setUpClass(cls) -> None: url = os.environ.get("MONGODB_URI") username = os.environ["DB_USER"] password = os.environ["DB_PASSWORD"] - cls.listener = listener = EventListener() + cls.listener = listener = OvertCommandListener() cls.client = cls.unmanaged_simple_client( url, username=username, password=password, event_listeners=[listener] ) diff --git a/test/test_monitoring.py b/test/test_monitoring.py index a0c520ed27..75fe5c987a 100644 --- a/test/test_monitoring.py +++ b/test/test_monitoring.py @@ -31,6 +31,7 @@ ) from test.utils import ( EventListener, + OvertCommandListener, wait_until, ) @@ -54,7 +55,7 @@ class TestCommandMonitoring(IntegrationTest): @client_context.require_connection def _setup_class(cls): super()._setup_class() - cls.listener = EventListener() + cls.listener = OvertCommandListener() cls.client = cls.unmanaged_rs_or_single_client( event_listeners=[cls.listener], retryWrites=False ) @@ -1100,11 +1101,13 @@ def test_first_batch_helper(self): @client_context.require_version_max(6, 1, 99) def test_sensitive_commands(self): - listeners = self.client._event_listeners + listener = EventListener() + client = self.rs_or_single_client(event_listeners=[listener]) + listeners = client._event_listeners - self.listener.reset() + listener.reset() cmd = SON([("getnonce", 1)]) - listeners.publish_command_start(cmd, "pymongo_test", 12345, self.client.address, None) # type: ignore[arg-type] + listeners.publish_command_start(cmd, "pymongo_test", 12345, client.address, None) # type: ignore[arg-type] delta = datetime.timedelta(milliseconds=100) listeners.publish_command_success( delta, @@ -1115,15 +1118,15 @@ def test_sensitive_commands(self): None, database_name="pymongo_test", ) - started = self.listener.started_events[0] - succeeded = self.listener.succeeded_events[0] - self.assertEqual(0, len(self.listener.failed_events)) + started = listener.started_events[0] + succeeded = listener.succeeded_events[0] + self.assertEqual(0, len(listener.failed_events)) self.assertIsInstance(started, monitoring.CommandStartedEvent) self.assertEqual({}, started.command) self.assertEqual("pymongo_test", started.database_name) self.assertEqual("getnonce", started.command_name) self.assertIsInstance(started.request_id, int) - self.assertEqual(self.client.address, started.connection_id) + self.assertEqual(client.address, started.connection_id) self.assertIsInstance(succeeded, monitoring.CommandSucceededEvent) self.assertEqual(succeeded.duration_micros, 100000) self.assertEqual(started.command_name, succeeded.command_name) @@ -1140,7 +1143,7 @@ class TestGlobalListener(IntegrationTest): @client_context.require_connection def _setup_class(cls): super()._setup_class() - cls.listener = 
EventListener() + cls.listener = OvertCommandListener() # We plan to call register(), which internally modifies _LISTENERS. cls.saved_listeners = copy.deepcopy(monitoring._LISTENERS) monitoring.register(cls.listener) diff --git a/test/test_read_write_concern_spec.py b/test/test_read_write_concern_spec.py index 67943d495d..db53b67ae4 100644 --- a/test/test_read_write_concern_spec.py +++ b/test/test_read_write_concern_spec.py @@ -24,7 +24,7 @@ from test import IntegrationTest, client_context, unittest from test.unified_format import generate_test_classes -from test.utils import EventListener +from test.utils import OvertCommandListener from pymongo import DESCENDING from pymongo.errors import ( @@ -44,7 +44,7 @@ class TestReadWriteConcernSpec(IntegrationTest): def test_omit_default_read_write_concern(self): - listener = EventListener() + listener = OvertCommandListener() # Client with default readConcern and writeConcern client = self.rs_or_single_client(event_listeners=[listener]) self.addCleanup(client.close) @@ -205,7 +205,7 @@ def test_error_includes_errInfo(self): @client_context.require_version_min(4, 9) def test_write_error_details_exposes_errinfo(self): - listener = EventListener() + listener = OvertCommandListener() client = self.rs_or_single_client(event_listeners=[listener]) self.addCleanup(client.close) db = client.errinfotest diff --git a/test/test_server_selection.py b/test/test_server_selection.py index 67e9716bf4..984b967f50 100644 --- a/test/test_server_selection.py +++ b/test/test_server_selection.py @@ -33,6 +33,7 @@ from test.utils import ( EventListener, FunctionCallRecorder, + OvertCommandListener, wait_until, ) from test.utils_selection_tests import ( @@ -74,7 +75,7 @@ def custom_selector(servers): return [servers[idx]] # Initialize client with appropriate listeners. - listener = EventListener() + listener = OvertCommandListener() client = self.rs_or_single_client( server_selector=custom_selector, event_listeners=[listener] ) diff --git a/test/test_session.py b/test/test_session.py index 9f94ded927..d0bbb075a8 100644 --- a/test/test_session.py +++ b/test/test_session.py @@ -36,6 +36,7 @@ from test.utils import ( EventListener, ExceptionCatchingThread, + OvertCommandListener, wait_until, ) @@ -198,7 +199,7 @@ def test_implicit_sessions_checkout(self): lsid_set = set() failures = 0 for _ in range(5): - listener = EventListener() + listener = OvertCommandListener() client = self.rs_or_single_client(event_listeners=[listener], maxPoolSize=1) cursor = client.db.test.find({}) ops: List[Tuple[Callable, List[Any]]] = [ diff --git a/test/test_ssl.py b/test/test_ssl.py index 36d7ba12b6..04db9b61a4 100644 --- a/test/test_ssl.py +++ b/test/test_ssl.py @@ -33,6 +33,7 @@ ) from test.utils import ( EventListener, + OvertCommandListener, cat_files, ignore_deprecations, ) From 849ed7970f45a104d35f77dec8c7ec684e596087 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Fri, 18 Oct 2024 15:35:44 -0500 Subject: [PATCH 055/182] PYTHON-4888 Use shrub.py for versioned api tests (#1949) --- .evergreen/config.yml | 106 +++++++++++++++++--------- .evergreen/scripts/generate_config.py | 47 +++++++++++- 2 files changed, 111 insertions(+), 42 deletions(-) diff --git a/.evergreen/config.yml b/.evergreen/config.yml index 9083da145b..cf43d7c246 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -2305,28 +2305,6 @@ axes: variables: COVERAGE: "coverage" - - id: versionedApi - display_name: "versionedApi" - values: - # Test against a cluster with requireApiVersion=1. 
- - id: "requireApiVersion1" - display_name: "requireApiVersion1" - tags: [ "versionedApi_tag" ] - variables: - # REQUIRE_API_VERSION is set to make drivers-evergreen-tools - # start a cluster with the requireApiVersion parameter. - REQUIRE_API_VERSION: "1" - # MONGODB_API_VERSION is the apiVersion to use in the test suite. - MONGODB_API_VERSION: "1" - # Test against a cluster with acceptApiVersion2 but without - # requireApiVersion, and don't automatically add apiVersion to - # clients created in the test suite. - - id: "acceptApiVersion2" - display_name: "acceptApiVersion2" - tags: [ "versionedApi_tag" ] - variables: - ORCHESTRATION_FILE: "versioned-api-testing.json" - - id: serverless display_name: "Serverless" values: @@ -3366,6 +3344,74 @@ buildvariants: SSL: ssl PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 +# Versioned API tests. +- name: versioned-api-require-v1-rhel8-py3.9-auth + tasks: + - name: .standalone .5.0 + - name: .standalone .6.0 + - name: .standalone .7.0 + - name: .standalone .8.0 + - name: .standalone .rapid + - name: .standalone .latest + display_name: Versioned API require v1 RHEL8 py3.9 Auth + run_on: + - rhel87-small + expansions: + AUTH: auth + REQUIRE_API_VERSION: "1" + MONGODB_API_VERSION: "1" + PYTHON_BINARY: /opt/python/3.9/bin/python3 + tags: [versionedApi_tag] +- name: versioned-api-accept-v2-rhel8-py3.9-auth + tasks: + - name: .standalone .5.0 + - name: .standalone .6.0 + - name: .standalone .7.0 + - name: .standalone .8.0 + - name: .standalone .rapid + - name: .standalone .latest + display_name: Versioned API accept v2 RHEL8 py3.9 Auth + run_on: + - rhel87-small + expansions: + AUTH: auth + ORCHESTRATION_FILE: versioned-api-testing.json + PYTHON_BINARY: /opt/python/3.9/bin/python3 + tags: [versionedApi_tag] +- name: versioned-api-require-v1-rhel8-py3.13-auth + tasks: + - name: .standalone .5.0 + - name: .standalone .6.0 + - name: .standalone .7.0 + - name: .standalone .8.0 + - name: .standalone .rapid + - name: .standalone .latest + display_name: Versioned API require v1 RHEL8 py3.13 Auth + run_on: + - rhel87-small + expansions: + AUTH: auth + REQUIRE_API_VERSION: "1" + MONGODB_API_VERSION: "1" + PYTHON_BINARY: /opt/python/3.13/bin/python3 + tags: [versionedApi_tag] +- name: versioned-api-accept-v2-rhel8-py3.13-auth + tasks: + - name: .standalone .5.0 + - name: .standalone .6.0 + - name: .standalone .7.0 + - name: .standalone .8.0 + - name: .standalone .rapid + - name: .standalone .latest + display_name: Versioned API accept v2 RHEL8 py3.13 Auth + run_on: + - rhel87-small + expansions: + AUTH: auth + ORCHESTRATION_FILE: versioned-api-testing.json + PYTHON_BINARY: /opt/python/3.13/bin/python3 + tags: [versionedApi_tag] + - matrix_name: "tests-fips" matrix_spec: platform: @@ -3559,22 +3605,6 @@ buildvariants: tasks: - name: atlas-data-lake-tests -- matrix_name: "stable-api-tests" - matrix_spec: - platform: rhel8 - python-version: ["3.9", "3.10"] - auth: "auth" - versionedApi: "*" - display_name: "Versioned API ${versionedApi} ${python-version}" - batchtime: 10080 # 7 days - tasks: - # Versioned API was introduced in MongoDB 4.7 - - "test-latest-standalone" - - "test-8.0-standalone" - - "test-7.0-standalone" - - "test-6.0-standalone" - - "test-5.0-standalone" - # OCSP test matrix. 
- name: ocsp-test-rhel8-v4.4-py3.9 tasks: diff --git a/.evergreen/scripts/generate_config.py b/.evergreen/scripts/generate_config.py index 6d614a9afe..dafcd4ff4f 100644 --- a/.evergreen/scripts/generate_config.py +++ b/.evergreen/scripts/generate_config.py @@ -23,7 +23,6 @@ ############## ALL_VERSIONS = ["4.0", "4.4", "5.0", "6.0", "7.0", "8.0", "rapid", "latest"] -VERSIONS_6_0_PLUS = ["6.0", "7.0", "8.0", "rapid", "latest"] CPYTHONS = ["3.9", "3.10", "3.11", "3.12", "3.13"] PYPYS = ["pypy3.9", "pypy3.10"] ALL_PYTHONS = CPYTHONS + PYPYS @@ -112,6 +111,14 @@ def get_python_binary(python: str, host: str) -> str: raise ValueError(f"no match found for python {python} on {host}") +def get_pythons_from(min_version: str) -> list[str]: + """Get all pythons starting from a minimum version.""" + min_version_float = float(min_version) + rapid_latest = ["rapid", "latest"] + versions = [v for v in ALL_VERSIONS if v not in rapid_latest] + return [v for v in versions if float(v) >= min_version_float] + rapid_latest + + def get_display_name(base: str, host: str, **kwargs) -> str: """Get the display name of a variant.""" display_name = f"{base} {HOSTS[host].display_name}" @@ -243,7 +250,7 @@ def create_server_variants() -> list[BuildVariant]: tasks = [f".{topology}"] # MacOS arm64 only works on server versions 6.0+ if host == "macos-arm64": - tasks = [f".{topology} .{version}" for version in VERSIONS_6_0_PLUS] + tasks = [f".{topology} .{version}" for version in get_pythons_from("6.0")] expansions = dict(AUTH=auth, SSL=ssl, TEST_SUITES=test_suite, SKIP_CSOT_TESTS="true") display_name = get_display_name("Test", host, python=python, **expansions) variant = create_variant( @@ -330,7 +337,7 @@ def create_load_balancer_variants(): task_names = ["load-balancer-test"] batchtime = BATCHTIME_WEEK expansions_base = dict(test_loadbalancer="true") - versions = ["6.0", "7.0", "8.0", "latest", "rapid"] + versions = get_pythons_from("6.0") variants = [] pythons = CPYTHONS + PYPYS for ind, (version, (auth, ssl)) in enumerate(product(versions, AUTH_SSLS)): @@ -442,10 +449,42 @@ def create_pyopenssl_variants(): return variants +def create_versioned_api_tests(): + host = "rhel8" + tags = ["versionedApi_tag"] + tasks = [f".standalone .{v}" for v in get_pythons_from("5.0")] + variants = [] + types = ["require v1", "accept v2"] + + # All python versions across platforms. + for python, test_type in product(MIN_MAX_PYTHON, types): + expansions = dict(AUTH="auth") + # Test against a cluster with requireApiVersion=1. + if test_type == types[0]: + # REQUIRE_API_VERSION is set to make drivers-evergreen-tools + # start a cluster with the requireApiVersion parameter. + expansions["REQUIRE_API_VERSION"] = "1" + # MONGODB_API_VERSION is the apiVersion to use in the test suite. + expansions["MONGODB_API_VERSION"] = "1" + else: + # Test against a cluster with acceptApiVersion2 but without + # requireApiVersion, and don't automatically add apiVersion to + # clients created in the test suite. 
+ expansions["ORCHESTRATION_FILE"] = "versioned-api-testing.json" + base_display_name = f"Versioned API {test_type}" + display_name = get_display_name(base_display_name, host, python=python, **expansions) + variant = create_variant( + tasks, display_name, host=host, python=python, tags=tags, expansions=expansions + ) + variants.append(variant) + + return variants + + ################## # Generate Config ################## -variants = create_pyopenssl_variants() +variants = create_versioned_api_tests() # print(len(variants)) generate_yaml(variants=variants) From 7e83c8c67f556e61ec7d7d75a544044a04c0b63d Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Mon, 21 Oct 2024 07:24:39 -0500 Subject: [PATCH 056/182] PYTHON-4889 Use shrub.py for green framework tests (#1951) --- .evergreen/config.yml | 74 +++++++++++++++++---------- .evergreen/scripts/generate_config.py | 16 +++++- 2 files changed, 61 insertions(+), 29 deletions(-) diff --git a/.evergreen/config.yml b/.evergreen/config.yml index cf43d7c246..dd35708f7d 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -2247,19 +2247,6 @@ axes: variables: MOD_WSGI_VERSION: "4" - # Choice of Python async framework - - id: green-framework - display_name: "Green Framework" - values: - - id: "eventlet" - display_name: "Eventlet" - variables: - GREEN_FRAMEWORK: "eventlet" - - id: "gevent" - display_name: "Gevent" - variables: - GREEN_FRAMEWORK: "gevent" - # Install and use the driver's C-extensions? - id: c-extensions display_name: "C Extensions" @@ -3412,6 +3399,52 @@ buildvariants: PYTHON_BINARY: /opt/python/3.13/bin/python3 tags: [versionedApi_tag] +# Green framework tests. +- name: eventlet-rhel8-py3.9 + tasks: + - name: .standalone + display_name: Eventlet RHEL8 py3.9 + run_on: + - rhel87-small + expansions: + GREEN_FRAMEWORK: eventlet + AUTH: auth + SSL: ssl + PYTHON_BINARY: /opt/python/3.9/bin/python3 +- name: gevent-rhel8-py3.9 + tasks: + - name: .standalone + display_name: Gevent RHEL8 py3.9 + run_on: + - rhel87-small + expansions: + GREEN_FRAMEWORK: gevent + AUTH: auth + SSL: ssl + PYTHON_BINARY: /opt/python/3.9/bin/python3 +- name: eventlet-rhel8-py3.12 + tasks: + - name: .standalone + display_name: Eventlet RHEL8 py3.12 + run_on: + - rhel87-small + expansions: + GREEN_FRAMEWORK: eventlet + AUTH: auth + SSL: ssl + PYTHON_BINARY: /opt/python/3.12/bin/python3 +- name: gevent-rhel8-py3.12 + tasks: + - name: .standalone + display_name: Gevent RHEL8 py3.12 + run_on: + - rhel87-small + expansions: + GREEN_FRAMEWORK: gevent + AUTH: auth + SSL: ssl + PYTHON_BINARY: /opt/python/3.12/bin/python3 + - matrix_name: "tests-fips" matrix_spec: platform: @@ -3460,21 +3493,6 @@ buildvariants: - ".4.2" - ".4.0" -- matrix_name: "tests-python-version-green-framework-rhel8" - matrix_spec: - platform: rhel8 - python-version: "*" - green-framework: "*" - auth-ssl: "*" - exclude_spec: - # Don't test green frameworks on these Python versions. 
- - platform: rhel8 - python-version: ["pypy3.9", "pypy3.10", "3.13"] - green-framework: "*" - auth-ssl: "*" - display_name: "${green-framework} ${python-version} ${platform} ${auth-ssl}" - tasks: *all-server-versions - - matrix_name: "tests-python-version-supports-openssl-102-test-ssl" matrix_spec: platform: rhel7 diff --git a/.evergreen/scripts/generate_config.py b/.evergreen/scripts/generate_config.py index dafcd4ff4f..28b79c10e7 100644 --- a/.evergreen/scripts/generate_config.py +++ b/.evergreen/scripts/generate_config.py @@ -481,10 +481,24 @@ def create_versioned_api_tests(): return variants +def create_green_framework_variants(): + variants = [] + tasks = [".standalone"] + host = "rhel8" + for python, framework in product([CPYTHONS[0], CPYTHONS[-2]], ["eventlet", "gevent"]): + expansions = dict(GREEN_FRAMEWORK=framework, AUTH="auth", SSL="ssl") + display_name = get_display_name(f"{framework.capitalize()}", host, python=python) + variant = create_variant( + tasks, display_name, host=host, python=python, expansions=expansions + ) + variants.append(variant) + return variants + + ################## # Generate Config ################## -variants = create_versioned_api_tests() +variants = create_green_framework_variants() # print(len(variants)) generate_yaml(variants=variants) From 60109e660c18f8e93a7229c3c3202983053eeb41 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 21 Oct 2024 07:32:56 -0500 Subject: [PATCH 057/182] Bump mypy from 1.11.2 to 1.12.1 (#1953) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/typing.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/typing.txt b/requirements/typing.txt index 2c23212da7..7ccc122f53 100644 --- a/requirements/typing.txt +++ b/requirements/typing.txt @@ -1,4 +1,4 @@ -mypy==1.11.2 +mypy==1.12.1 pyright==1.1.384 typing_extensions -r ./encryption.txt From 5280596141d577dd13832a36a5fc6409d09e9bad Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Mon, 21 Oct 2024 08:16:12 -0500 Subject: [PATCH 058/182] PYTHON-4890 Use shrub.py for storage engine tests (#1955) --- .evergreen/config.yml | 73 ++++++++++----------------- .evergreen/scripts/generate_config.py | 43 ++++++++++++++-- 2 files changed, 66 insertions(+), 50 deletions(-) diff --git a/.evergreen/config.yml b/.evergreen/config.yml index dd35708f7d..4ffdfca581 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -2260,19 +2260,6 @@ axes: variables: NO_EXT: "" - # Choice of MongoDB storage engine - - id: storage-engine - display_name: Storage - values: - - id: mmapv1 - display_name: MMAPv1 - variables: - STORAGE_ENGINE: "mmapv1" - - id: inmemory - display_name: InMemory - variables: - STORAGE_ENGINE: "inmemory" - # Run with test commands disabled on server? - id: disableTestCommands display_name: Disable test commands @@ -3331,6 +3318,34 @@ buildvariants: SSL: ssl PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 +# Storage Engine tests. 
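A minimal standalone check of the pairing used by create_green_framework_variants() shown above (illustrative; the CPYTHONS list is copied from generate_config.py):

    from itertools import product

    CPYTHONS = ["3.9", "3.10", "3.11", "3.12", "3.13"]
    frameworks = ["eventlet", "gevent"]

    # The oldest CPython and the second-newest are each crossed with both
    # green frameworks, giving the four eventlet/gevent variants.
    combos = list(product([CPYTHONS[0], CPYTHONS[-2]], frameworks))
    assert combos == [
        ("3.9", "eventlet"),
        ("3.9", "gevent"),
        ("3.12", "eventlet"),
        ("3.12", "gevent"),
    ]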
+- name: storage-inmemory-rhel8-py3.9 + tasks: + - name: .standalone .4.0 + - name: .standalone .4.4 + - name: .standalone .5.0 + - name: .standalone .6.0 + - name: .standalone .7.0 + - name: .standalone .8.0 + - name: .standalone .rapid + - name: .standalone .latest + display_name: Storage InMemory RHEL8 py3.9 + run_on: + - rhel87-small + expansions: + STORAGE_ENGINE: inmemory + PYTHON_BINARY: /opt/python/3.9/bin/python3 +- name: storage-mmapv1-rhel8-py3.9 + tasks: + - name: .standalone .4.0 + - name: .replica_set .4.0 + display_name: Storage MMAPv1 RHEL8 py3.9 + run_on: + - rhel87-small + expansions: + STORAGE_ENGINE: mmapv1 + PYTHON_BINARY: /opt/python/3.9/bin/python3 + # Versioned API tests. - name: versioned-api-require-v1-rhel8-py3.9-auth tasks: @@ -3503,38 +3518,6 @@ buildvariants: tasks: - ".5.0" -# Storage engine tests on RHEL 8.4 (x86_64) with Python 3.9. -- matrix_name: "tests-storage-engines" - matrix_spec: - platform: rhel8 - storage-engine: "*" - python-version: "3.9" - display_name: "Storage ${storage-engine} ${python-version} ${platform}" - rules: - - if: - platform: rhel8 - storage-engine: ["inmemory"] - python-version: "*" - then: - add_tasks: - - "test-latest-standalone" - - "test-8.0-standalone" - - "test-7.0-standalone" - - "test-6.0-standalone" - - "test-5.0-standalone" - - "test-4.4-standalone" - - "test-4.2-standalone" - - "test-4.0-standalone" - - if: - # MongoDB 4.2 drops support for MMAPv1 - platform: rhel8 - storage-engine: ["mmapv1"] - python-version: "*" - then: - add_tasks: - - "test-4.0-standalone" - - "test-4.0-replica_set" - # enableTestCommands=0 tests on RHEL 8.4 (x86_64) with Python 3.9. - matrix_name: "test-disableTestCommands" matrix_spec: diff --git a/.evergreen/scripts/generate_config.py b/.evergreen/scripts/generate_config.py index 28b79c10e7..2d20d2de68 100644 --- a/.evergreen/scripts/generate_config.py +++ b/.evergreen/scripts/generate_config.py @@ -111,14 +111,24 @@ def get_python_binary(python: str, host: str) -> str: raise ValueError(f"no match found for python {python} on {host}") -def get_pythons_from(min_version: str) -> list[str]: - """Get all pythons starting from a minimum version.""" +def get_versions_from(min_version: str) -> list[str]: + """Get all server versions starting from a minimum version.""" min_version_float = float(min_version) rapid_latest = ["rapid", "latest"] versions = [v for v in ALL_VERSIONS if v not in rapid_latest] return [v for v in versions if float(v) >= min_version_float] + rapid_latest +def get_versions_until(max_version: str) -> list[str]: + """Get all server version up to a max version.""" + max_version_float = float(max_version) + versions = [v for v in ALL_VERSIONS if v not in ["rapid", "latest"]] + versions = [v for v in versions if float(v) <= max_version_float] + if not len(versions): + raise ValueError(f"No server versions found less <= {max_version}") + return versions + + def get_display_name(base: str, host: str, **kwargs) -> str: """Get the display name of a variant.""" display_name = f"{base} {HOSTS[host].display_name}" @@ -250,7 +260,7 @@ def create_server_variants() -> list[BuildVariant]: tasks = [f".{topology}"] # MacOS arm64 only works on server versions 6.0+ if host == "macos-arm64": - tasks = [f".{topology} .{version}" for version in get_pythons_from("6.0")] + tasks = [f".{topology} .{version}" for version in get_versions_from("6.0")] expansions = dict(AUTH=auth, SSL=ssl, TEST_SUITES=test_suite, SKIP_CSOT_TESTS="true") display_name = get_display_name("Test", host, python=python, **expansions) 
variant = create_variant( @@ -337,7 +347,7 @@ def create_load_balancer_variants(): task_names = ["load-balancer-test"] batchtime = BATCHTIME_WEEK expansions_base = dict(test_loadbalancer="true") - versions = get_pythons_from("6.0") + versions = get_versions_from("6.0") variants = [] pythons = CPYTHONS + PYPYS for ind, (version, (auth, ssl)) in enumerate(product(versions, AUTH_SSLS)): @@ -449,10 +459,33 @@ def create_pyopenssl_variants(): return variants +def create_storage_engine_tests(): + host = "rhel8" + engines = ["InMemory", "MMAPv1"] + variants = [] + for engine in engines: + python = CPYTHONS[0] + expansions = dict(STORAGE_ENGINE=engine.lower()) + if engine == engines[0]: + tasks = [f".standalone .{v}" for v in ALL_VERSIONS] + else: + # MongoDB 4.2 drops support for MMAPv1 + versions = get_versions_until("4.0") + tasks = [f".standalone .{v}" for v in versions] + [ + f".replica_set .{v}" for v in versions + ] + display_name = get_display_name(f"Storage {engine}", host, python=python) + variant = create_variant( + tasks, display_name, host=host, python=python, expansions=expansions + ) + variants.append(variant) + return variants + + def create_versioned_api_tests(): host = "rhel8" tags = ["versionedApi_tag"] - tasks = [f".standalone .{v}" for v in get_pythons_from("5.0")] + tasks = [f".standalone .{v}" for v in get_versions_from("5.0")] variants = [] types = ["require v1", "accept v2"] From 4003edf267b831cfd4d017fc8d3d3826940ee19c Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Mon, 21 Oct 2024 09:45:14 -0500 Subject: [PATCH 059/182] PYTHON-4891 Use shrub.py for c extension tests (#1956) --- .evergreen/config.yml | 132 ++++++++++++++++---------- .evergreen/scripts/generate_config.py | 32 ++++++- 2 files changed, 114 insertions(+), 50 deletions(-) diff --git a/.evergreen/config.yml b/.evergreen/config.yml index 4ffdfca581..3230b827b0 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -2247,19 +2247,6 @@ axes: variables: MOD_WSGI_VERSION: "4" - # Install and use the driver's C-extensions? - - id: c-extensions - display_name: "C Extensions" - values: - - id: "without-c-extensions" - display_name: "Without C Extensions" - variables: - NO_EXT: "1" - - id: "with-c-extensions" - display_name: "With C Extensions" - variables: - NO_EXT: "" - # Run with test commands disabled on server? - id: disableTestCommands display_name: Disable test commands @@ -3460,6 +3447,89 @@ buildvariants: SSL: ssl PYTHON_BINARY: /opt/python/3.12/bin/python3 +# No C Ext tests. +- name: no-c-ext-rhel8-py3.9 + tasks: + - name: .standalone + display_name: No C Ext RHEL8 py3.9 + run_on: + - rhel87-small + expansions: + NO_EXT: "1" + PYTHON_BINARY: /opt/python/3.9/bin/python3 +- name: no-c-ext-rhel8-py3.10 + tasks: + - name: .replica_set + display_name: No C Ext RHEL8 py3.10 + run_on: + - rhel87-small + expansions: + NO_EXT: "1" + PYTHON_BINARY: /opt/python/3.10/bin/python3 +- name: no-c-ext-rhel8-py3.11 + tasks: + - name: .sharded_cluster + display_name: No C Ext RHEL8 py3.11 + run_on: + - rhel87-small + expansions: + NO_EXT: "1" + PYTHON_BINARY: /opt/python/3.11/bin/python3 +- name: no-c-ext-rhel8-py3.12 + tasks: + - name: .standalone + display_name: No C Ext RHEL8 py3.12 + run_on: + - rhel87-small + expansions: + NO_EXT: "1" + PYTHON_BINARY: /opt/python/3.12/bin/python3 +- name: no-c-ext-rhel8-py3.13 + tasks: + - name: .replica_set + display_name: No C Ext RHEL8 py3.13 + run_on: + - rhel87-small + expansions: + NO_EXT: "1" + PYTHON_BINARY: /opt/python/3.13/bin/python3 + +# Atlas Data Lake tests. 
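To make the two version-filtering helpers above concrete, here is a lightly condensed standalone copy with their outputs spelled out; the patched generate_config.py remains the authoritative version.

    ALL_VERSIONS = ["4.0", "4.4", "5.0", "6.0", "7.0", "8.0", "rapid", "latest"]

    def get_versions_from(min_version: str) -> list[str]:
        """Numbered versions >= min_version, with rapid/latest always appended."""
        numbered = [v for v in ALL_VERSIONS if v not in ("rapid", "latest")]
        kept = [v for v in numbered if float(v) >= float(min_version)]
        return kept + ["rapid", "latest"]

    def get_versions_until(max_version: str) -> list[str]:
        """Numbered versions <= max_version; rapid/latest are never included."""
        numbered = [v for v in ALL_VERSIONS if v not in ("rapid", "latest")]
        kept = [v for v in numbered if float(v) <= float(max_version)]
        if not kept:
            raise ValueError(f"No server versions found <= {max_version}")
        return kept

    # macOS arm64 and load-balancer variants only target 6.0+:
    assert get_versions_from("6.0") == ["6.0", "7.0", "8.0", "rapid", "latest"]
    # MMAPv1 was dropped in MongoDB 4.2, so its variant only runs 4.0 tasks:
    assert get_versions_until("4.0") == ["4.0"]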
+- name: atlas-data-lake-rhel8-py3.9-no-c + tasks: + - name: atlas-data-lake-tests + display_name: Atlas Data Lake RHEL8 py3.9 No C + run_on: + - rhel87-small + expansions: + NO_EXT: "1" + PYTHON_BINARY: /opt/python/3.9/bin/python3 +- name: atlas-data-lake-rhel8-py3.9 + tasks: + - name: atlas-data-lake-tests + display_name: Atlas Data Lake RHEL8 py3.9 + run_on: + - rhel87-small + expansions: + PYTHON_BINARY: /opt/python/3.9/bin/python3 +- name: atlas-data-lake-rhel8-py3.13-no-c + tasks: + - name: atlas-data-lake-tests + display_name: Atlas Data Lake RHEL8 py3.13 No C + run_on: + - rhel87-small + expansions: + NO_EXT: "1" + PYTHON_BINARY: /opt/python/3.13/bin/python3 +- name: atlas-data-lake-rhel8-py3.13 + tasks: + - name: atlas-data-lake-tests + display_name: Atlas Data Lake RHEL8 py3.13 + run_on: + - rhel87-small + expansions: + PYTHON_BINARY: /opt/python/3.13/bin/python3 + - matrix_name: "tests-fips" matrix_spec: platform: @@ -3482,32 +3552,6 @@ buildvariants: tasks: - ".6.0" -- matrix_name: "tests-python-version-rhel8-without-c-extensions" - matrix_spec: - platform: rhel8 - python-version: "*" - c-extensions: without-c-extensions - auth-ssl: noauth-nossl - coverage: "*" - exclude_spec: - # These interpreters are always tested without extensions. - - platform: rhel8 - python-version: ["pypy3.9", "pypy3.10"] - c-extensions: "*" - auth-ssl: "*" - coverage: "*" - display_name: "${c-extensions} ${python-version} ${platform} ${auth} ${ssl} ${coverage}" - tasks: &all-server-versions - - ".rapid" - - ".latest" - - ".8.0" - - ".7.0" - - ".6.0" - - ".5.0" - - ".4.4" - - ".4.2" - - ".4.0" - - matrix_name: "tests-python-version-supports-openssl-102-test-ssl" matrix_spec: platform: rhel7 @@ -3596,16 +3640,6 @@ buildvariants: tasks: - "serverless_task_group" -- matrix_name: "data-lake-spec-tests" - matrix_spec: - platform: ubuntu-22.04 - python-version: ["3.9", "3.10"] - auth: "auth" - c-extensions: "*" - display_name: "Atlas Data Lake ${python-version} ${c-extensions}" - tasks: - - name: atlas-data-lake-tests - # OCSP test matrix. 
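The NO_EXT toggling in these variants comes from handle_c_ext(), which is defined elsewhere in generate_config.py and not shown in this patch. The helper below is therefore an assumption about its effect, inferred from the removed c-extensions axis (NO_EXT: "1" disables the extensions) and from the generated variants, which only set NO_EXT when the C extensions are off; apply_c_ext_choice is an invented name.

    def apply_c_ext_choice(expansions: dict, use_c_extensions: bool) -> None:
        # Assumed behaviour: only the "without extensions" case needs a flag.
        if not use_c_extensions:
            expansions["NO_EXT"] = "1"

    with_ext: dict = {}
    without_ext: dict = {}
    apply_c_ext_choice(with_ext, use_c_extensions=True)
    apply_c_ext_choice(without_ext, use_c_extensions=False)
    assert with_ext == {}
    assert without_ext == {"NO_EXT": "1"}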
- name: ocsp-test-rhel8-v4.4-py3.9 tasks: diff --git a/.evergreen/scripts/generate_config.py b/.evergreen/scripts/generate_config.py index 2d20d2de68..aa5bf18c9e 100644 --- a/.evergreen/scripts/generate_config.py +++ b/.evergreen/scripts/generate_config.py @@ -528,10 +528,40 @@ def create_green_framework_variants(): return variants +def generate_no_c_ext_variants(): + variants = [] + host = "rhel8" + for python, topology in zip_cycle(CPYTHONS, TOPOLOGIES): + tasks = [f".{topology}"] + expansions = dict() + handle_c_ext(C_EXTS[0], expansions) + display_name = get_display_name("No C Ext", host, python=python) + variant = create_variant( + tasks, display_name, host=host, python=python, expansions=expansions + ) + variants.append(variant) + return variants + + +def generate_atlas_data_lake_variants(): + variants = [] + host = "rhel8" + for python, c_ext in product(MIN_MAX_PYTHON, C_EXTS): + tasks = ["atlas-data-lake-tests"] + expansions = dict() + handle_c_ext(c_ext, expansions) + display_name = get_display_name("Atlas Data Lake", host, python=python, **expansions) + variant = create_variant( + tasks, display_name, host=host, python=python, expansions=expansions + ) + variants.append(variant) + return variants + + ################## # Generate Config ################## -variants = create_green_framework_variants() +variants = generate_atlas_data_lake_variants() # print(len(variants)) generate_yaml(variants=variants) From 081ad89b844a8080539e2b45cb315156778bf3c1 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Mon, 21 Oct 2024 12:05:56 -0500 Subject: [PATCH 060/182] PYTHON-4894 Fix handling of auth test marker (#1958) --- test/pytest_conf.py | 23 +++++++++++------------ 1 file changed, 11 insertions(+), 12 deletions(-) diff --git a/test/pytest_conf.py b/test/pytest_conf.py index 75f3e74322..a6e24cd9b1 100644 --- a/test/pytest_conf.py +++ b/test/pytest_conf.py @@ -2,15 +2,14 @@ def pytest_collection_modifyitems(items, config): - sync_items = [] - async_items = [ - item - for item in items - if "asynchronous" in item.fspath.dirname or sync_items.append(item) # type: ignore[func-returns-value] - ] - for item in async_items: - if not any(item.iter_markers()): - item.add_marker("default_async") - for item in sync_items: - if not any(item.iter_markers()): - item.add_marker("default") + # Markers that should overlap with the default markers. 
+ overlap_markers = ["async"] + + for item in items: + if "asynchronous" in item.fspath.dirname: + default_marker = "default_async" + else: + default_marker = "default" + markers = [m for m in item.iter_markers() if m not in overlap_markers] + if not markers: + item.add_marker(default_marker) From 25de52ae5d6cc14ff27ba16363ee4aeb6d5b3b92 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Mon, 21 Oct 2024 13:11:07 -0500 Subject: [PATCH 061/182] PYTHON-4892 Use shrub.py for remaining axes (#1957) --- .evergreen/config.yml | 201 +++++++++----------------- .evergreen/scripts/generate_config.py | 52 ++++++- 2 files changed, 115 insertions(+), 138 deletions(-) diff --git a/.evergreen/config.yml b/.evergreen/config.yml index 3230b827b0..ebc070f345 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -2112,47 +2112,6 @@ axes: AUTH: "noauth" SSL: "nossl" - # Choice of MongoDB server version - - id: mongodb-version - display_name: "MongoDB" - values: - - id: "4.0" - display_name: "MongoDB 4.0" - variables: - VERSION: "4.0" - - id: "4.2" - display_name: "MongoDB 4.2" - variables: - VERSION: "4.2" - - id: "4.4" - display_name: "MongoDB 4.4" - variables: - VERSION: "4.4" - - id: "5.0" - display_name: "MongoDB 5.0" - variables: - VERSION: "5.0" - - id: "6.0" - display_name: "MongoDB 6.0" - variables: - VERSION: "6.0" - - id: "7.0" - display_name: "MongoDB 7.0" - variables: - VERSION: "7.0" - - id: "8.0" - display_name: "MongoDB 8.0" - variables: - VERSION: "8.0" - - id: "latest" - display_name: "MongoDB latest" - variables: - VERSION: "latest" - - id: "rapid" - display_name: "MongoDB rapid" - variables: - VERSION: "rapid" - # Choice of Python runtime version - id: python-version display_name: "Python" @@ -2212,69 +2171,6 @@ axes: variables: PYTHON_BINARY: "C:/python/Python313/python.exe" - - id: python-version-windows-32 - display_name: "Python" - values: - - - - id: "3.9" - display_name: "32-bit Python 3.9" - variables: - PYTHON_BINARY: "C:/python/32/Python39/python.exe" - - id: "3.10" - display_name: "32-bit Python 3.10" - variables: - PYTHON_BINARY: "C:/python/32/Python310/python.exe" - - id: "3.11" - display_name: "32-bit Python 3.11" - variables: - PYTHON_BINARY: "C:/python/32/Python311/python.exe" - - id: "3.12" - display_name: "32-bit Python 3.12" - variables: - PYTHON_BINARY: "C:/python/32/Python312/python.exe" - - id: "3.13" - display_name: "32-bit Python 3.13" - variables: - PYTHON_BINARY: "C:/python/32/Python313/python.exe" - - # Choice of mod_wsgi version - - id: mod-wsgi-version - display_name: "mod_wsgi version" - values: - - id: "4" - display_name: "mod_wsgi 4.x" - variables: - MOD_WSGI_VERSION: "4" - - # Run with test commands disabled on server? - - id: disableTestCommands - display_name: Disable test commands - values: - - id: disabled - display_name: disabled - variables: - DISABLE_TEST_COMMANDS: "1" - - # Generate coverage report? - - id: coverage - display_name: "Coverage" - values: - - id: "coverage" - display_name: "Coverage" - tags: ["coverage_tag"] - variables: - COVERAGE: "coverage" - - - id: serverless - display_name: "Serverless" - values: - - id: "enabled" - display_name: "Serverless" - variables: - test_serverless: true - batchtime: 10080 # 7 days - buildvariants: # Server Tests for RHEL8. - name: test-rhel8-py3.9-auth-ssl-cov @@ -3530,6 +3426,71 @@ buildvariants: expansions: PYTHON_BINARY: /opt/python/3.13/bin/python3 +# Mod_wsgi tests. 
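The intent of the pytest collection hook at the top of this patch, restated over plain marker names (the real hook works with pytest Mark objects, so this is an illustration of the rule rather than a drop-in): unmarked tests get a default marker chosen by directory, and an "async" marker on its own does not count as an explicit marker.

    def default_marker_for(dirname, existing_marker_names):
        overlap_markers = ["async"]
        default = "default_async" if "asynchronous" in dirname else "default"
        meaningful = [m for m in existing_marker_names if m not in overlap_markers]
        return None if meaningful else default

    assert default_marker_for("test/asynchronous", []) == "default_async"
    assert default_marker_for("test", []) == "default"
    # "async" alone should still receive the default marker (the overlap case):
    assert default_marker_for("test/asynchronous", ["async"]) == "default_async"
    # Any other explicit marker suppresses the default:
    assert default_marker_for("test", ["slow"]) is None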
+- name: mod_wsgi-ubuntu-22-py3.9 + tasks: + - name: mod-wsgi-standalone + - name: mod-wsgi-replica-set + - name: mod-wsgi-embedded-mode-standalone + - name: mod-wsgi-embedded-mode-replica-set + display_name: mod_wsgi Ubuntu-22 py3.9 + run_on: + - ubuntu2204-small + expansions: + MOD_WSGI_VERSION: "4" + PYTHON_BINARY: /opt/python/3.9/bin/python3 +- name: mod_wsgi-ubuntu-22-py3.13 + tasks: + - name: mod-wsgi-standalone + - name: mod-wsgi-replica-set + - name: mod-wsgi-embedded-mode-standalone + - name: mod-wsgi-embedded-mode-replica-set + display_name: mod_wsgi Ubuntu-22 py3.13 + run_on: + - ubuntu2204-small + expansions: + MOD_WSGI_VERSION: "4" + PYTHON_BINARY: /opt/python/3.13/bin/python3 + +# Disable test commands variants. +- name: disable-test-commands-rhel8-py3.9 + tasks: + - name: .latest + display_name: Disable test commands RHEL8 py3.9 + run_on: + - rhel87-small + expansions: + AUTH: auth + SSL: ssl + DISABLE_TEST_COMMANDS: "1" + PYTHON_BINARY: /opt/python/3.9/bin/python3 + +# Serverless variants. +- name: serverless-rhel8-py3.9 + tasks: + - name: serverless_task_group + display_name: Serverless RHEL8 py3.9 + run_on: + - rhel87-small + batchtime: 10080 + expansions: + test_serverless: "true" + AUTH: auth + SSL: ssl + PYTHON_BINARY: /opt/python/3.9/bin/python3 +- name: serverless-rhel8-py3.13 + tasks: + - name: serverless_task_group + display_name: Serverless RHEL8 py3.13 + run_on: + - rhel87-small + batchtime: 10080 + expansions: + test_serverless: "true" + AUTH: auth + SSL: ssl + PYTHON_BINARY: /opt/python/3.13/bin/python3 + - matrix_name: "tests-fips" matrix_spec: platform: @@ -3562,16 +3523,6 @@ buildvariants: tasks: - ".5.0" -# enableTestCommands=0 tests on RHEL 8.4 (x86_64) with Python 3.9. -- matrix_name: "test-disableTestCommands" - matrix_spec: - platform: rhel8 - disableTestCommands: "*" - python-version: "3.9" - display_name: "Disable test commands ${python-version} ${platform}" - tasks: - - ".latest" - - matrix_name: "test-search-index-helpers" matrix_spec: platform: rhel8 @@ -3580,18 +3531,6 @@ buildvariants: tasks: - name: "test_atlas_task_group_search_indexes" -- matrix_name: "tests-mod-wsgi" - matrix_spec: - platform: ubuntu-22.04 - python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] - mod-wsgi-version: "*" - display_name: "${mod-wsgi-version} ${python-version} ${platform}" - tasks: - - name: "mod-wsgi-standalone" - - name: "mod-wsgi-replica-set" - - name: "mod-wsgi-embedded-mode-standalone" - - name: "mod-wsgi-embedded-mode-replica-set" - - matrix_name: "mockupdb-tests" matrix_spec: platform: rhel8 @@ -3630,16 +3569,6 @@ buildvariants: tasks: - name: "atlas-connect" -- matrix_name: "serverless" - matrix_spec: - platform: rhel8 - python-version: "*" - auth-ssl: auth-ssl - serverless: "enabled" - display_name: "${serverless} ${python-version} ${platform}" - tasks: - - "serverless_task_group" - # OCSP test matrix. 
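One small unit worth spelling out for the serverless variants above: Evergreen batchtime is expressed in minutes, so the 10080 used here (BATCHTIME_WEEK in generate_config.py) is exactly one week.

    MINUTES_PER_DAY = 24 * 60
    BATCHTIME_WEEK = 7 * MINUTES_PER_DAY
    assert BATCHTIME_WEEK == 10080  # matches "batchtime: 10080  # 7 days"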
- name: ocsp-test-rhel8-v4.4-py3.9 tasks: diff --git a/.evergreen/scripts/generate_config.py b/.evergreen/scripts/generate_config.py index aa5bf18c9e..d94c6c02fc 100644 --- a/.evergreen/scripts/generate_config.py +++ b/.evergreen/scripts/generate_config.py @@ -54,6 +54,7 @@ class Host: HOSTS["win32"] = Host("win32", "windows-64-vsMulti-small", "Win32") HOSTS["macos"] = Host("macos", "macos-14", "macOS") HOSTS["macos-arm64"] = Host("macos-arm64", "macos-14-arm64", "macOS Arm64") +HOSTS["ubuntu22"] = Host("ubuntu22", "ubuntu2204-small", "Ubuntu-22") ############## @@ -102,7 +103,7 @@ def get_python_binary(python: str, host: str) -> str: python = python.replace(".", "") return f"{base}/Python{python}/python.exe" - if host == "rhel8": + if host in ["rhel8", "ubuntu22"]: return f"/opt/python/{python}/bin/python3" if host in ["macos", "macos-arm64"]: @@ -558,10 +559,57 @@ def generate_atlas_data_lake_variants(): return variants +def generate_mod_wsgi_variants(): + variants = [] + host = "ubuntu22" + tasks = [ + "mod-wsgi-standalone", + "mod-wsgi-replica-set", + "mod-wsgi-embedded-mode-standalone", + "mod-wsgi-embedded-mode-replica-set", + ] + expansions = dict(MOD_WSGI_VERSION="4") + for python in MIN_MAX_PYTHON: + display_name = get_display_name("mod_wsgi", host, python=python) + variant = create_variant( + tasks, display_name, host=host, python=python, expansions=expansions + ) + variants.append(variant) + return variants + + +def generate_disable_test_commands_variants(): + host = "rhel8" + expansions = dict(AUTH="auth", SSL="ssl", DISABLE_TEST_COMMANDS="1") + python = CPYTHONS[0] + display_name = get_display_name("Disable test commands", host, python=python) + tasks = [".latest"] + return [create_variant(tasks, display_name, host=host, python=python, expansions=expansions)] + + +def generate_serverless_variants(): + host = "rhel8" + batchtime = BATCHTIME_WEEK + expansions = dict(test_serverless="true", AUTH="auth", SSL="ssl") + tasks = ["serverless_task_group"] + base_name = "Serverless" + return [ + create_variant( + tasks, + get_display_name(base_name, host, python=python), + host=host, + python=python, + expansions=expansions, + batchtime=batchtime, + ) + for python in MIN_MAX_PYTHON + ] + + ################## # Generate Config ################## -variants = generate_atlas_data_lake_variants() +variants = generate_serverless_variants() # print(len(variants)) generate_yaml(variants=variants) From 1ace0455d78adac5cf95e5414cdc4b61d90fe6d0 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Mon, 21 Oct 2024 15:47:18 -0500 Subject: [PATCH 062/182] PYTHON-4898 Ensure consistent versions of tests across hosts (#1961) --- .evergreen/config.yml | 446 ++++++++++++++++++++++---- .evergreen/scripts/generate_config.py | 11 +- 2 files changed, 382 insertions(+), 75 deletions(-) diff --git a/.evergreen/config.yml b/.evergreen/config.yml index ebc070f345..cad0863eaa 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -2172,7 +2172,7 @@ axes: PYTHON_BINARY: "C:/python/Python313/python.exe" buildvariants: -# Server Tests for RHEL8. +# Server Tests. - name: test-rhel8-py3.9-auth-ssl-cov tasks: - name: .standalone @@ -2339,8 +2339,6 @@ buildvariants: AUTH: auth SSL: ssl PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 - -# Server tests for MacOS. 
- name: test-macos-py3.9-auth-ssl-sync tasks: - name: .standalone @@ -2351,8 +2349,32 @@ buildvariants: AUTH: auth SSL: ssl TEST_SUITES: default + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 +- name: test-macos-py3.9-noauth-ssl-sync + tasks: + - name: .standalone + display_name: Test macOS py3.9 NoAuth SSL Sync + run_on: + - macos-14 + expansions: + AUTH: noauth + SSL: ssl + TEST_SUITES: default + SKIP_CSOT_TESTS: "true" PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 +- name: test-macos-py3.9-noauth-nossl-sync + tasks: + - name: .standalone + display_name: Test macOS py3.9 NoAuth NoSSL Sync + run_on: + - macos-14 + expansions: + AUTH: noauth + SSL: nossl + TEST_SUITES: default SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 - name: test-macos-py3.9-auth-ssl-async tasks: - name: .standalone @@ -2363,11 +2385,47 @@ buildvariants: AUTH: auth SSL: ssl TEST_SUITES: default_async + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 +- name: test-macos-py3.9-noauth-ssl-async + tasks: + - name: .standalone + display_name: Test macOS py3.9 NoAuth SSL Async + run_on: + - macos-14 + expansions: + AUTH: noauth + SSL: ssl + TEST_SUITES: default_async + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 +- name: test-macos-py3.9-noauth-nossl-async + tasks: + - name: .standalone + display_name: Test macOS py3.9 NoAuth NoSSL Async + run_on: + - macos-14 + expansions: + AUTH: noauth + SSL: nossl + TEST_SUITES: default_async + SKIP_CSOT_TESTS: "true" PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 +- name: test-macos-py3.13-auth-ssl-sync + tasks: + - name: .sharded_cluster + display_name: Test macOS py3.13 Auth SSL Sync + run_on: + - macos-14 + expansions: + AUTH: auth + SSL: ssl + TEST_SUITES: default SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 - name: test-macos-py3.13-noauth-ssl-sync tasks: - - name: .replica_set + - name: .sharded_cluster display_name: Test macOS py3.13 NoAuth SSL Sync run_on: - macos-14 @@ -2375,46 +2433,56 @@ buildvariants: AUTH: noauth SSL: ssl TEST_SUITES: default - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 SKIP_CSOT_TESTS: "true" -- name: test-macos-py3.13-noauth-ssl-async + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 +- name: test-macos-py3.13-noauth-nossl-sync tasks: - - name: .replica_set - display_name: Test macOS py3.13 NoAuth SSL Async + - name: .sharded_cluster + display_name: Test macOS py3.13 NoAuth NoSSL Sync run_on: - macos-14 expansions: AUTH: noauth + SSL: nossl + TEST_SUITES: default + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 +- name: test-macos-py3.13-auth-ssl-async + tasks: + - name: .sharded_cluster + display_name: Test macOS py3.13 Auth SSL Async + run_on: + - macos-14 + expansions: + AUTH: auth SSL: ssl TEST_SUITES: default_async - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 SKIP_CSOT_TESTS: "true" -- name: test-macos-py3.9-noauth-nossl-sync + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 +- name: test-macos-py3.13-noauth-ssl-async tasks: - name: .sharded_cluster - display_name: Test macOS py3.9 NoAuth NoSSL Sync + display_name: Test 
macOS py3.13 NoAuth SSL Async run_on: - macos-14 expansions: AUTH: noauth - SSL: nossl - TEST_SUITES: default - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 + SSL: ssl + TEST_SUITES: default_async SKIP_CSOT_TESTS: "true" -- name: test-macos-py3.9-noauth-nossl-async + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 +- name: test-macos-py3.13-noauth-nossl-async tasks: - name: .sharded_cluster - display_name: Test macOS py3.9 NoAuth NoSSL Async + display_name: Test macOS py3.13 NoAuth NoSSL Async run_on: - macos-14 expansions: AUTH: noauth SSL: nossl TEST_SUITES: default_async - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 SKIP_CSOT_TESTS: "true" - -# Server tests for macOS Arm64. + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 - name: test-macos-arm64-py3.9-auth-ssl-sync tasks: - name: .standalone .6.0 @@ -2431,6 +2499,38 @@ buildvariants: TEST_SUITES: default SKIP_CSOT_TESTS: "true" PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 +- name: test-macos-arm64-py3.9-noauth-ssl-sync + tasks: + - name: .standalone .6.0 + - name: .standalone .7.0 + - name: .standalone .8.0 + - name: .standalone .rapid + - name: .standalone .latest + display_name: Test macOS Arm64 py3.9 NoAuth SSL Sync + run_on: + - macos-14-arm64 + expansions: + AUTH: noauth + SSL: ssl + TEST_SUITES: default + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 +- name: test-macos-arm64-py3.9-noauth-nossl-sync + tasks: + - name: .standalone .6.0 + - name: .standalone .7.0 + - name: .standalone .8.0 + - name: .standalone .rapid + - name: .standalone .latest + display_name: Test macOS Arm64 py3.9 NoAuth NoSSL Sync + run_on: + - macos-14-arm64 + expansions: + AUTH: noauth + SSL: nossl + TEST_SUITES: default + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 - name: test-macos-arm64-py3.9-auth-ssl-async tasks: - name: .standalone .6.0 @@ -2447,13 +2547,61 @@ buildvariants: TEST_SUITES: default_async SKIP_CSOT_TESTS: "true" PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 +- name: test-macos-arm64-py3.9-noauth-ssl-async + tasks: + - name: .standalone .6.0 + - name: .standalone .7.0 + - name: .standalone .8.0 + - name: .standalone .rapid + - name: .standalone .latest + display_name: Test macOS Arm64 py3.9 NoAuth SSL Async + run_on: + - macos-14-arm64 + expansions: + AUTH: noauth + SSL: ssl + TEST_SUITES: default_async + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 +- name: test-macos-arm64-py3.9-noauth-nossl-async + tasks: + - name: .standalone .6.0 + - name: .standalone .7.0 + - name: .standalone .8.0 + - name: .standalone .rapid + - name: .standalone .latest + display_name: Test macOS Arm64 py3.9 NoAuth NoSSL Async + run_on: + - macos-14-arm64 + expansions: + AUTH: noauth + SSL: nossl + TEST_SUITES: default_async + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 +- name: test-macos-arm64-py3.13-auth-ssl-sync + tasks: + - name: .sharded_cluster .6.0 + - name: .sharded_cluster .7.0 + - name: .sharded_cluster .8.0 + - name: .sharded_cluster .rapid + - name: .sharded_cluster .latest + display_name: Test macOS Arm64 py3.13 Auth SSL Sync + run_on: + - macos-14-arm64 + expansions: + AUTH: auth + SSL: ssl + TEST_SUITES: default + SKIP_CSOT_TESTS: 
"true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 - name: test-macos-arm64-py3.13-noauth-ssl-sync tasks: - - name: .replica_set .6.0 - - name: .replica_set .7.0 - - name: .replica_set .8.0 - - name: .replica_set .rapid - - name: .replica_set .latest + - name: .sharded_cluster .6.0 + - name: .sharded_cluster .7.0 + - name: .sharded_cluster .8.0 + - name: .sharded_cluster .rapid + - name: .sharded_cluster .latest display_name: Test macOS Arm64 py3.13 NoAuth SSL Sync run_on: - macos-14-arm64 @@ -2463,46 +2611,62 @@ buildvariants: TEST_SUITES: default SKIP_CSOT_TESTS: "true" PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 -- name: test-macos-arm64-py3.13-noauth-ssl-async +- name: test-macos-arm64-py3.13-noauth-nossl-sync tasks: - - name: .replica_set .6.0 - - name: .replica_set .7.0 - - name: .replica_set .8.0 - - name: .replica_set .rapid - - name: .replica_set .latest - display_name: Test macOS Arm64 py3.13 NoAuth SSL Async + - name: .sharded_cluster .6.0 + - name: .sharded_cluster .7.0 + - name: .sharded_cluster .8.0 + - name: .sharded_cluster .rapid + - name: .sharded_cluster .latest + display_name: Test macOS Arm64 py3.13 NoAuth NoSSL Sync run_on: - macos-14-arm64 expansions: AUTH: noauth + SSL: nossl + TEST_SUITES: default + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 +- name: test-macos-arm64-py3.13-auth-ssl-async + tasks: + - name: .sharded_cluster .6.0 + - name: .sharded_cluster .7.0 + - name: .sharded_cluster .8.0 + - name: .sharded_cluster .rapid + - name: .sharded_cluster .latest + display_name: Test macOS Arm64 py3.13 Auth SSL Async + run_on: + - macos-14-arm64 + expansions: + AUTH: auth SSL: ssl TEST_SUITES: default_async SKIP_CSOT_TESTS: "true" PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 -- name: test-macos-arm64-py3.9-noauth-nossl-sync +- name: test-macos-arm64-py3.13-noauth-ssl-async tasks: - name: .sharded_cluster .6.0 - name: .sharded_cluster .7.0 - name: .sharded_cluster .8.0 - name: .sharded_cluster .rapid - name: .sharded_cluster .latest - display_name: Test macOS Arm64 py3.9 NoAuth NoSSL Sync + display_name: Test macOS Arm64 py3.13 NoAuth SSL Async run_on: - macos-14-arm64 expansions: AUTH: noauth - SSL: nossl - TEST_SUITES: default + SSL: ssl + TEST_SUITES: default_async SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 -- name: test-macos-arm64-py3.9-noauth-nossl-async + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 +- name: test-macos-arm64-py3.13-noauth-nossl-async tasks: - name: .sharded_cluster .6.0 - name: .sharded_cluster .7.0 - name: .sharded_cluster .8.0 - name: .sharded_cluster .rapid - name: .sharded_cluster .latest - display_name: Test macOS Arm64 py3.9 NoAuth NoSSL Async + display_name: Test macOS Arm64 py3.13 NoAuth NoSSL Async run_on: - macos-14-arm64 expansions: @@ -2510,9 +2674,7 @@ buildvariants: SSL: nossl TEST_SUITES: default_async SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 - -# Server tests for Windows. 
+ PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 - name: test-win64-py3.9-auth-ssl-sync tasks: - name: .standalone @@ -2523,8 +2685,32 @@ buildvariants: AUTH: auth SSL: ssl TEST_SUITES: default + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: C:/python/Python39/python.exe +- name: test-win64-py3.9-noauth-ssl-sync + tasks: + - name: .standalone + display_name: Test Win64 py3.9 NoAuth SSL Sync + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: noauth + SSL: ssl + TEST_SUITES: default + SKIP_CSOT_TESTS: "true" PYTHON_BINARY: C:/python/Python39/python.exe +- name: test-win64-py3.9-noauth-nossl-sync + tasks: + - name: .standalone + display_name: Test Win64 py3.9 NoAuth NoSSL Sync + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: noauth + SSL: nossl + TEST_SUITES: default SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: C:/python/Python39/python.exe - name: test-win64-py3.9-auth-ssl-async tasks: - name: .standalone @@ -2535,11 +2721,47 @@ buildvariants: AUTH: auth SSL: ssl TEST_SUITES: default_async + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: C:/python/Python39/python.exe +- name: test-win64-py3.9-noauth-ssl-async + tasks: + - name: .standalone + display_name: Test Win64 py3.9 NoAuth SSL Async + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: noauth + SSL: ssl + TEST_SUITES: default_async + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: C:/python/Python39/python.exe +- name: test-win64-py3.9-noauth-nossl-async + tasks: + - name: .standalone + display_name: Test Win64 py3.9 NoAuth NoSSL Async + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: noauth + SSL: nossl + TEST_SUITES: default_async + SKIP_CSOT_TESTS: "true" PYTHON_BINARY: C:/python/Python39/python.exe +- name: test-win64-py3.13-auth-ssl-sync + tasks: + - name: .sharded_cluster + display_name: Test Win64 py3.13 Auth SSL Sync + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: auth + SSL: ssl + TEST_SUITES: default SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: C:/python/Python313/python.exe - name: test-win64-py3.13-noauth-ssl-sync tasks: - - name: .replica_set + - name: .sharded_cluster display_name: Test Win64 py3.13 NoAuth SSL Sync run_on: - windows-64-vsMulti-small @@ -2547,44 +2769,56 @@ buildvariants: AUTH: noauth SSL: ssl TEST_SUITES: default - PYTHON_BINARY: C:/python/Python313/python.exe SKIP_CSOT_TESTS: "true" -- name: test-win64-py3.13-noauth-ssl-async + PYTHON_BINARY: C:/python/Python313/python.exe +- name: test-win64-py3.13-noauth-nossl-sync tasks: - - name: .replica_set - display_name: Test Win64 py3.13 NoAuth SSL Async + - name: .sharded_cluster + display_name: Test Win64 py3.13 NoAuth NoSSL Sync run_on: - windows-64-vsMulti-small expansions: AUTH: noauth + SSL: nossl + TEST_SUITES: default + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: C:/python/Python313/python.exe +- name: test-win64-py3.13-auth-ssl-async + tasks: + - name: .sharded_cluster + display_name: Test Win64 py3.13 Auth SSL Async + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: auth SSL: ssl TEST_SUITES: default_async - PYTHON_BINARY: C:/python/Python313/python.exe SKIP_CSOT_TESTS: "true" -- name: test-win64-py3.9-noauth-nossl-sync + PYTHON_BINARY: C:/python/Python313/python.exe +- name: test-win64-py3.13-noauth-ssl-async tasks: - name: .sharded_cluster - display_name: Test Win64 py3.9 NoAuth NoSSL Sync + display_name: Test Win64 py3.13 NoAuth SSL Async run_on: - windows-64-vsMulti-small expansions: AUTH: noauth - SSL: nossl - TEST_SUITES: default - PYTHON_BINARY: C:/python/Python39/python.exe + SSL: 
ssl + TEST_SUITES: default_async SKIP_CSOT_TESTS: "true" -- name: test-win64-py3.9-noauth-nossl-async + PYTHON_BINARY: C:/python/Python313/python.exe +- name: test-win64-py3.13-noauth-nossl-async tasks: - name: .sharded_cluster - display_name: Test Win64 py3.9 NoAuth NoSSL Async + display_name: Test Win64 py3.13 NoAuth NoSSL Async run_on: - windows-64-vsMulti-small expansions: AUTH: noauth SSL: nossl TEST_SUITES: default_async - PYTHON_BINARY: C:/python/Python39/python.exe SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: C:/python/Python313/python.exe - name: test-win32-py3.9-auth-ssl-sync tasks: - name: .standalone @@ -2595,10 +2829,32 @@ buildvariants: AUTH: auth SSL: ssl TEST_SUITES: default + SKIP_CSOT_TESTS: "true" PYTHON_BINARY: C:/python/32/Python39/python.exe +- name: test-win32-py3.9-noauth-ssl-sync + tasks: + - name: .standalone + display_name: Test Win32 py3.9 NoAuth SSL Sync + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: noauth + SSL: ssl + TEST_SUITES: default SKIP_CSOT_TESTS: "true" - -# Server tests for Win32. + PYTHON_BINARY: C:/python/32/Python39/python.exe +- name: test-win32-py3.9-noauth-nossl-sync + tasks: + - name: .standalone + display_name: Test Win32 py3.9 NoAuth NoSSL Sync + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: noauth + SSL: nossl + TEST_SUITES: default + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: C:/python/32/Python39/python.exe - name: test-win32-py3.9-auth-ssl-async tasks: - name: .standalone @@ -2609,11 +2865,47 @@ buildvariants: AUTH: auth SSL: ssl TEST_SUITES: default_async + SKIP_CSOT_TESTS: "true" PYTHON_BINARY: C:/python/32/Python39/python.exe +- name: test-win32-py3.9-noauth-ssl-async + tasks: + - name: .standalone + display_name: Test Win32 py3.9 NoAuth SSL Async + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: noauth + SSL: ssl + TEST_SUITES: default_async SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: C:/python/32/Python39/python.exe +- name: test-win32-py3.9-noauth-nossl-async + tasks: + - name: .standalone + display_name: Test Win32 py3.9 NoAuth NoSSL Async + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: noauth + SSL: nossl + TEST_SUITES: default_async + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: C:/python/32/Python39/python.exe +- name: test-win32-py3.13-auth-ssl-sync + tasks: + - name: .sharded_cluster + display_name: Test Win32 py3.13 Auth SSL Sync + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: auth + SSL: ssl + TEST_SUITES: default + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: C:/python/32/Python313/python.exe - name: test-win32-py3.13-noauth-ssl-sync tasks: - - name: .replica_set + - name: .sharded_cluster display_name: Test Win32 py3.13 NoAuth SSL Sync run_on: - windows-64-vsMulti-small @@ -2621,44 +2913,56 @@ buildvariants: AUTH: noauth SSL: ssl TEST_SUITES: default - PYTHON_BINARY: C:/python/32/Python313/python.exe SKIP_CSOT_TESTS: "true" -- name: test-win32-py3.13-noauth-ssl-async + PYTHON_BINARY: C:/python/32/Python313/python.exe +- name: test-win32-py3.13-noauth-nossl-sync tasks: - - name: .replica_set - display_name: Test Win32 py3.13 NoAuth SSL Async + - name: .sharded_cluster + display_name: Test Win32 py3.13 NoAuth NoSSL Sync run_on: - windows-64-vsMulti-small expansions: AUTH: noauth + SSL: nossl + TEST_SUITES: default + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: C:/python/32/Python313/python.exe +- name: test-win32-py3.13-auth-ssl-async + tasks: + - name: .sharded_cluster + display_name: Test Win32 py3.13 Auth SSL Async + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: 
auth SSL: ssl TEST_SUITES: default_async - PYTHON_BINARY: C:/python/32/Python313/python.exe SKIP_CSOT_TESTS: "true" -- name: test-win32-py3.9-noauth-nossl-sync + PYTHON_BINARY: C:/python/32/Python313/python.exe +- name: test-win32-py3.13-noauth-ssl-async tasks: - name: .sharded_cluster - display_name: Test Win32 py3.9 NoAuth NoSSL Sync + display_name: Test Win32 py3.13 NoAuth SSL Async run_on: - windows-64-vsMulti-small expansions: AUTH: noauth - SSL: nossl - TEST_SUITES: default - PYTHON_BINARY: C:/python/32/Python39/python.exe + SSL: ssl + TEST_SUITES: default_async SKIP_CSOT_TESTS: "true" -- name: test-win32-py3.9-noauth-nossl-async + PYTHON_BINARY: C:/python/32/Python313/python.exe +- name: test-win32-py3.13-noauth-nossl-async tasks: - name: .sharded_cluster - display_name: Test Win32 py3.9 NoAuth NoSSL Async + display_name: Test Win32 py3.13 NoAuth NoSSL Async run_on: - windows-64-vsMulti-small expansions: AUTH: noauth SSL: nossl TEST_SUITES: default_async - PYTHON_BINARY: C:/python/32/Python39/python.exe SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: C:/python/32/Python313/python.exe # Encryption tests. - name: encryption-rhel8-py3.9-auth-ssl diff --git a/.evergreen/scripts/generate_config.py b/.evergreen/scripts/generate_config.py index d94c6c02fc..5a682594e9 100644 --- a/.evergreen/scripts/generate_config.py +++ b/.evergreen/scripts/generate_config.py @@ -254,10 +254,13 @@ def create_server_variants() -> list[BuildVariant]: # Test a subset on each of the other platforms. for host in ("macos", "macos-arm64", "win64", "win32"): - for (python, (auth, ssl), topology), sync in product( - zip_cycle(MIN_MAX_PYTHON, AUTH_SSLS, TOPOLOGIES), SYNCS - ): + for ( + python, + sync, + (auth, ssl), + ) in product(MIN_MAX_PYTHON, SYNCS, AUTH_SSLS): test_suite = "default" if sync == "sync" else "default_async" + topology = TOPOLOGIES[0] if python == CPYTHONS[0] else TOPOLOGIES[-1] tasks = [f".{topology}"] # MacOS arm64 only works on server versions 6.0+ if host == "macos-arm64": @@ -610,6 +613,6 @@ def generate_serverless_variants(): # Generate Config ################## -variants = generate_serverless_variants() +variants = create_server_variants() # print(len(variants)) generate_yaml(variants=variants) From 6ca766e066577f9f334c52e0c1ea154e938b24eb Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Mon, 21 Oct 2024 18:41:17 -0500 Subject: [PATCH 063/182] PYTHON-4893 Use shrub.py for aws auth tests (#1959) --- .evergreen/config.yml | 135 ++++++++++++++++++-------- .evergreen/scripts/generate_config.py | 33 ++++++- 2 files changed, 126 insertions(+), 42 deletions(-) diff --git a/.evergreen/config.yml b/.evergreen/config.yml index cad0863eaa..e357f02f2b 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -3795,6 +3795,100 @@ buildvariants: SSL: ssl PYTHON_BINARY: /opt/python/3.13/bin/python3 +# AWS Auth tests. 
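Stepping back to the create_server_variants() change in the previous patch: it is what makes the long run of macOS, Win64, and Win32 variants above line up, since every non-Linux host now gets the same twelve combinations, with the topology keyed off the Python version. A quick standalone check; MIN_MAX_PYTHON, SYNCS, AUTH_SSLS, and TOPOLOGIES are assumed to hold the values implied by the generated YAML.

    from itertools import product

    MIN_MAX_PYTHON = ["3.9", "3.13"]
    SYNCS = ["sync", "async"]
    AUTH_SSLS = [("auth", "ssl"), ("noauth", "ssl"), ("noauth", "nossl")]
    TOPOLOGIES = ["standalone", "replica_set", "sharded_cluster"]

    combos = list(product(MIN_MAX_PYTHON, SYNCS, AUTH_SSLS))
    assert len(combos) == 12  # twelve server-test variants per host

    def topology_for(python):
        # py3.9 is pinned to .standalone tasks, py3.13 to .sharded_cluster tasks.
        return TOPOLOGIES[0] if python == MIN_MAX_PYTHON[0] else TOPOLOGIES[-1]

    assert topology_for("3.9") == "standalone"
    assert topology_for("3.13") == "sharded_cluster"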
+- name: aws-auth-ubuntu-20-py3.9 + tasks: + - name: aws-auth-test-4.4 + - name: aws-auth-test-5.0 + - name: aws-auth-test-6.0 + - name: aws-auth-test-7.0 + - name: aws-auth-test-8.0 + - name: aws-auth-test-rapid + - name: aws-auth-test-latest + display_name: AWS Auth Ubuntu-20 py3.9 + run_on: + - ubuntu2004-small + expansions: + PYTHON_BINARY: /opt/python/3.9/bin/python3 +- name: aws-auth-ubuntu-20-py3.13 + tasks: + - name: aws-auth-test-4.4 + - name: aws-auth-test-5.0 + - name: aws-auth-test-6.0 + - name: aws-auth-test-7.0 + - name: aws-auth-test-8.0 + - name: aws-auth-test-rapid + - name: aws-auth-test-latest + display_name: AWS Auth Ubuntu-20 py3.13 + run_on: + - ubuntu2004-small + expansions: + PYTHON_BINARY: /opt/python/3.13/bin/python3 +- name: aws-auth-win64-py3.9 + tasks: + - name: aws-auth-test-4.4 + - name: aws-auth-test-5.0 + - name: aws-auth-test-6.0 + - name: aws-auth-test-7.0 + - name: aws-auth-test-8.0 + - name: aws-auth-test-rapid + - name: aws-auth-test-latest + display_name: AWS Auth Win64 py3.9 + run_on: + - windows-64-vsMulti-small + expansions: + skip_ECS_auth_test: "true" + PYTHON_BINARY: C:/python/Python39/python.exe +- name: aws-auth-win64-py3.13 + tasks: + - name: aws-auth-test-4.4 + - name: aws-auth-test-5.0 + - name: aws-auth-test-6.0 + - name: aws-auth-test-7.0 + - name: aws-auth-test-8.0 + - name: aws-auth-test-rapid + - name: aws-auth-test-latest + display_name: AWS Auth Win64 py3.13 + run_on: + - windows-64-vsMulti-small + expansions: + skip_ECS_auth_test: "true" + PYTHON_BINARY: C:/python/Python313/python.exe +- name: aws-auth-macos-py3.9 + tasks: + - name: aws-auth-test-4.4 + - name: aws-auth-test-5.0 + - name: aws-auth-test-6.0 + - name: aws-auth-test-7.0 + - name: aws-auth-test-8.0 + - name: aws-auth-test-rapid + - name: aws-auth-test-latest + display_name: AWS Auth macOS py3.9 + run_on: + - macos-14 + expansions: + skip_ECS_auth_test: "true" + skip_EC2_auth_test: "true" + skip_web_identity_auth_test: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 +- name: aws-auth-macos-py3.13 + tasks: + - name: aws-auth-test-4.4 + - name: aws-auth-test-5.0 + - name: aws-auth-test-6.0 + - name: aws-auth-test-7.0 + - name: aws-auth-test-8.0 + - name: aws-auth-test-rapid + - name: aws-auth-test-latest + display_name: AWS Auth macOS py3.13 + run_on: + - macos-14 + expansions: + skip_ECS_auth_test: "true" + skip_EC2_auth_test: "true" + skip_web_identity_auth_test: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 + - matrix_name: "tests-fips" matrix_spec: platform: @@ -4237,47 +4331,6 @@ buildvariants: - name: testgcpoidc_task_group batchtime: 20160 # Use a batchtime of 14 days as suggested by the CSFLE test README -- matrix_name: "aws-auth-test" - matrix_spec: - platform: [ubuntu-20.04] - python-version: ["3.9"] - display_name: "MONGODB-AWS Auth ${platform} ${python-version}" - tasks: - - name: "aws-auth-test-4.4" - - name: "aws-auth-test-5.0" - - name: "aws-auth-test-6.0" - - name: "aws-auth-test-7.0" - - name: "aws-auth-test-8.0" - - name: "aws-auth-test-rapid" - - name: "aws-auth-test-latest" - -- matrix_name: "aws-auth-test-mac" - matrix_spec: - platform: [macos] - display_name: "MONGODB-AWS Auth ${platform} ${python-version-mac}" - tasks: - - name: "aws-auth-test-4.4" - - name: "aws-auth-test-5.0" - - name: "aws-auth-test-6.0" - - name: "aws-auth-test-7.0" - - name: "aws-auth-test-8.0" - - name: "aws-auth-test-rapid" - - name: "aws-auth-test-latest" - -- matrix_name: "aws-auth-test-windows" - 
matrix_spec: - platform: [windows] - python-version-windows: "*" - display_name: "MONGODB-AWS Auth ${platform} ${python-version-windows}" - tasks: - - name: "aws-auth-test-4.4" - - name: "aws-auth-test-5.0" - - name: "aws-auth-test-6.0" - - name: "aws-auth-test-7.0" - - name: "aws-auth-test-8.0" - - name: "aws-auth-test-rapid" - - name: "aws-auth-test-latest" - - name: testgcpkms-variant display_name: "GCP KMS" run_on: diff --git a/.evergreen/scripts/generate_config.py b/.evergreen/scripts/generate_config.py index 5a682594e9..3f1ea724ed 100644 --- a/.evergreen/scripts/generate_config.py +++ b/.evergreen/scripts/generate_config.py @@ -54,6 +54,7 @@ class Host: HOSTS["win32"] = Host("win32", "windows-64-vsMulti-small", "Win32") HOSTS["macos"] = Host("macos", "macos-14", "macOS") HOSTS["macos-arm64"] = Host("macos-arm64", "macos-14-arm64", "macOS Arm64") +HOSTS["ubuntu20"] = Host("ubuntu20", "ubuntu2004-small", "Ubuntu-20") HOSTS["ubuntu22"] = Host("ubuntu22", "ubuntu2204-small", "Ubuntu-22") @@ -103,7 +104,7 @@ def get_python_binary(python: str, host: str) -> str: python = python.replace(".", "") return f"{base}/Python{python}/python.exe" - if host in ["rhel8", "ubuntu22"]: + if host in ["rhel8", "ubuntu22", "ubuntu20"]: return f"/opt/python/{python}/bin/python3" if host in ["macos", "macos-arm64"]: @@ -609,6 +610,36 @@ def generate_serverless_variants(): ] +def generate_aws_auth_variants(): + variants = [] + tasks = [ + "aws-auth-test-4.4", + "aws-auth-test-5.0", + "aws-auth-test-6.0", + "aws-auth-test-7.0", + "aws-auth-test-8.0", + "aws-auth-test-rapid", + "aws-auth-test-latest", + ] + + for host, python in product(["ubuntu20", "win64", "macos"], MIN_MAX_PYTHON): + expansions = dict() + if host != "ubuntu20": + expansions["skip_ECS_auth_test"] = "true" + if host == "macos": + expansions["skip_EC2_auth_test"] = "true" + expansions["skip_web_identity_auth_test"] = "true" + variant = create_variant( + tasks, + get_display_name("AWS Auth", host, python=python), + host=host, + python=python, + expansions=expansions, + ) + variants.append(variant) + return variants + + ################## # Generate Config ################## From 5f7afeaed6257ae740ce319a644c2aae083a0063 Mon Sep 17 00:00:00 2001 From: "Jeffrey A. Clark" Date: Tue, 22 Oct 2024 13:07:56 -0400 Subject: [PATCH 064/182] PYTHON-4883 Add release date to changelog entries (#1952) --- doc/changelog.rst | 494 +++++++++++++++++++++++----------------------- 1 file changed, 252 insertions(+), 242 deletions(-) diff --git a/doc/changelog.rst b/doc/changelog.rst index 4c1955d19d..29fddb7b5c 100644 --- a/doc/changelog.rst +++ b/doc/changelog.rst @@ -1,8 +1,8 @@ Changelog ========= -Changes in Version 4.11.0 -------------------------- +Changes in Version 4.11.0 (YYYY/MM/DD) +-------------------------------------- .. warning:: PyMongo 4.11 drops support for Python 3.8: Python 3.9+ or PyPy 3.9+ is now required. .. warning:: PyMongo 4.11 drops support for MongoDB 3.6. PyMongo now supports MongoDB 4.0+. @@ -32,8 +32,8 @@ in this release. .. _PyMongo 4.11 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=40784 -Changes in Version 4.10.1 -------------------------- +Changes in Version 4.10.1 (2024/10/01) +-------------------------------------- Version 4.10.1 is a bug fix release. @@ -49,8 +49,8 @@ in this release. .. 
_PyMongo 4.10.1 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=40788 -Changes in Version 4.10.0 -------------------------- +Changes in Version 4.10.0 (2024/09/30) +-------------------------------------- - Added provisional **(BETA)** support for a new Binary BSON subtype (9) used for efficient storage and retrieval of vectors: densely packed arrays of numbers, all of the same type. @@ -67,8 +67,8 @@ in this release. .. _PyMongo 4.10 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=40553 -Changes in Version 4.9.2 -------------------------- +Changes in Version 4.9.2 (2024/10/02) +------------------------------------- - Fixed a bug where :class:`~pymongo.asynchronous.mongo_client.AsyncMongoClient` could deadlock. - Fixed a bug where PyMongo could fail to import on Windows if ``asyncio`` is misconfigured. @@ -83,8 +83,8 @@ in this release. .. _PyMongo 4.9.2 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=40732 -Changes in Version 4.9.1 -------------------------- +Changes in Version 4.9.1 (2024/09/18) +------------------------------------- - Add missing documentation about the fact the async API is in beta state. @@ -97,8 +97,8 @@ in this release. .. _PyMongo 4.9.1 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=40720 -Changes in Version 4.9.0 -------------------------- +Changes in Version 4.9 (2024/09/18) +----------------------------------- .. warning:: Driver support for MongoDB 3.6 reached end of life in April 2024. PyMongo 4.9 will be the last release to support MongoDB 3.6. @@ -173,8 +173,8 @@ in this release. .. _PyMongo 4.9 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=39940 -Changes in Version 4.8.0 -------------------------- +Changes in Version 4.8.0 (2024/06/26) +------------------------------------- .. warning:: PyMongo 4.8 drops support for Python 3.7 and PyPy 3.8: Python 3.8+ or PyPy 3.9+ is now required. @@ -209,8 +209,8 @@ in this release. .. _PyMongo 4.8 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=37057 -Changes in Version 4.7.3 -------------------------- +Changes in Version 4.7.3 (2024/06/04) +------------------------------------- Version 4.7.3 has further fixes for lazily loading modules. @@ -226,8 +226,8 @@ in this release. .. _PyMongo 4.7.3 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=39865 -Changes in Version 4.7.2 -------------------------- +Changes in Version 4.7.2 (2024/05/07) +------------------------------------- Version 4.7.2 fixes a bug introduced in 4.7.0: @@ -242,8 +242,8 @@ in this release. .. _PyMongo 4.7.2 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=39710 -Changes in Version 4.7.1 -------------------------- +Changes in Version 4.7.1 (2024/04/30) +------------------------------------- Version 4.7.1 fixes a bug introduced in 4.7.0: @@ -259,8 +259,8 @@ in this release. .. 
_PyMongo 4.7.1 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=39680 -Changes in Version 4.7 ------------------------- +Changes in Version 4.7.0 (2024/04/24) +------------------------------------- PyMongo 4.7 brings a number of improvements including: @@ -355,8 +355,8 @@ Unavoidable breaking changes - The "aws" extra now requires minimum version of ``1.1.0`` for ``pymongo_auth_aws``. -Changes in Version 4.6.3 ------------------------- +Changes in Version 4.6.3 (2024/03/27) +------------------------------------- PyMongo 4.6.3 fixes the following bug: @@ -370,8 +370,8 @@ in this release. .. _PyMongo 4.6.3 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=38360 -Changes in Version 4.6.2 ------------------------- +Changes in Version 4.6.2 (2024/02/21) +------------------------------------- PyMongo 4.6.2 fixes the following bug: @@ -386,8 +386,8 @@ in this release. .. _PyMongo 4.6.2 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=37906 -Changes in Version 4.6.1 ------------------------- +Changes in Version 4.6.1 (2023/11/29) +------------------------------------- PyMongo 4.6.1 fixes the following bug: @@ -401,8 +401,8 @@ in this release. .. _PyMongo 4.6.1 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=37138 -Changes in Version 4.6 ----------------------- +Changes in Version 4.6.0 (2023/11/01) +------------------------------------- PyMongo 4.6 brings a number of improvements including: @@ -441,8 +441,8 @@ in this release. .. _PyMongo 4.6 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=36542 -Changes in Version 4.5 ----------------------- +Changes in Version 4.5.0 (2023/08/22) +------------------------------------- PyMongo 4.5 brings a number of improvements including: @@ -477,8 +477,8 @@ in this release. .. _PYTHON-3824: https://jira.mongodb.org/browse/PYTHON-3824 .. _PYTHON-3846: https://jira.mongodb.org/browse/PYTHON-3846 -Changes in Version 4.4.1 ------------------------- +Changes in Version 4.4.1 (2023/07/13) +------------------------------------- Version 4.4.1 fixes the following bugs: @@ -497,8 +497,8 @@ in this release. .. _PYTHON-3800: https://jira.mongodb.org/browse/PYTHON-3800 .. _PyMongo 4.4.1 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=36329 -Changes in Version 4.4 ------------------------ +Changes in Version 4.4.0 (2023/06/21) +------------------------------------- PyMongo 4.4 brings a number of improvements including: @@ -539,8 +539,8 @@ in this release. .. _PYTHON-3717: https://jira.mongodb.org/browse/PYTHON-3717 .. _PYTHON-3718: https://jira.mongodb.org/browse/PYTHON-3718 -Changes in Version 4.3.3 ------------------------- +Changes in Version 4.3.3 (2022/11/17) +------------------------------------- Version 4.3.3 documents support for the following: @@ -567,8 +567,8 @@ in this release. .. _PYTHON-3508: https://jira.mongodb.org/browse/PYTHON-3508 .. _PyMongo 4.3.3 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=34709 -Changes in Version 4.3 (4.3.2) ------------------------------- +Changes in Version 4.3.2 (2022/10/18) +------------------------------------- Note: We withheld uploading tags 4.3.0 and 4.3.1 to PyPI due to a version handling error and a necessary documentation update. @@ -624,8 +624,8 @@ in this release. .. 
_PYTHON-3389: https://jira.mongodb.org/browse/PYTHON-3389 .. _PyMongo 4.3 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=33425 -Changes in Version 4.2 ----------------------- +Changes in Version 4.2.0 (2022/07/20) +------------------------------------- .. warning:: PyMongo 4.2 drops support for Python 3.6: Python 3.7+ is now required. @@ -713,8 +713,8 @@ in this release. .. _PYTHON-3187: https://jira.mongodb.org/browse/PYTHON-3187 .. _PyMongo 4.2 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=33196 -Changes in Version 4.1.1 -------------------------- +Changes in Version 4.1.1 (2022/04/13) +------------------------------------- Version 4.1.1 fixes a number of bugs: @@ -739,8 +739,8 @@ in this release. .. _PYTHON-3222: https://jira.mongodb.org/browse/PYTHON-3222 .. _PyMongo 4.1.1 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=33290 -Changes in Version 4.1 ----------------------- +Changes in Version 4.1 (2021/12/07) +----------------------------------- .. warning:: PyMongo 4.1 drops support for Python 3.6.0 and 3.6.1, Python 3.6.2+ is now required. @@ -794,8 +794,18 @@ in this release. .. _PYTHON-3186: https://jira.mongodb.org/browse/PYTHON-3186 .. _pymongo-stubs: https://github.com/mongodb-labs/pymongo-stubs -Changes in Version 4.0 ----------------------- +Changes in Version 4.0.2 (2022/03/03) +------------------------------------- + +- No changes + +Changes in Version 4.0.1 (2021/12/07) +------------------------------------- + +- No changes + +Changes in Version 4.0 (2021/11/29) +----------------------------------- .. warning:: PyMongo 4.0 drops support for Python 2.7, 3.4, and 3.5. @@ -1014,8 +1024,8 @@ in this release. .. _PyMongo 4.0 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=18463 .. _DBRef specification: https://github.com/mongodb/specifications/blob/5a8c8d7/source/dbref.rst -Changes in Version 3.13.0 -------------------------- +Changes in Version 3.13.0 (2022/11/01) +-------------------------------------- Version 3.13 provides an upgrade path to PyMongo 4.x. Most of the API changes from PyMongo 4.0 have been backported in a backward compatible way, allowing @@ -1087,8 +1097,8 @@ in this release. .. _PYTHON-3222: https://jira.mongodb.org/browse/PYTHON-3222 .. _PyMongo 3.13.0 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=31570 -Changes in Version 3.12.3 -------------------------- +Changes in Version 3.12.3 (2021/12/07) +-------------------------------------- Issues Resolved ............... @@ -1102,8 +1112,8 @@ in this release. .. _PYTHON-3028: https://jira.mongodb.org/browse/PYTHON-3028 .. _PyMongo 3.12.3 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=32505 -Changes in Version 3.12.2 -------------------------- +Changes in Version 3.12.2 (2021/11/29) +-------------------------------------- Issues Resolved ............... @@ -1122,8 +1132,8 @@ in this release. .. _PYTHON-3017: https://jira.mongodb.org/browse/PYTHON-3017 .. _PyMongo 3.12.2 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=32310 -Changes in Version 3.12.1 -------------------------- +Changes in Version 3.12.1 (2021/10/19) +-------------------------------------- Issues Resolved ............... @@ -1143,8 +1153,8 @@ in this release. .. 
_PYTHON-2866: https://jira.mongodb.org/browse/PYTHON-2866 .. _PyMongo 3.12.1 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=31527 -Changes in Version 3.12.0 -------------------------- +Changes in Version 3.12.0 (2021/07/13) +-------------------------------------- .. warning:: PyMongo 3.12.0 deprecates support for Python 2.7, 3.4 and 3.5. These Python versions will not be supported by PyMongo 4. @@ -1236,8 +1246,8 @@ in this release. .. _PyMongo 3.12.0 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=29594 -Changes in Version 3.11.3 -------------------------- +Changes in Version 3.11.3 (2021/02/02) +-------------------------------------- Issues Resolved ............... @@ -1251,8 +1261,8 @@ in this release. .. _PYTHON-2452: https://jira.mongodb.org/browse/PYTHON-2452 .. _PyMongo 3.11.3 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=30355 -Changes in Version 3.11.2 -------------------------- +Changes in Version 3.11.2 (2020/12/02) +-------------------------------------- Issues Resolved ............... @@ -1279,8 +1289,8 @@ in this release. .. _PYTHON-2443: https://jira.mongodb.org/browse/PYTHON-2443 .. _PyMongo 3.11.2 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=30315 -Changes in Version 3.11.1 -------------------------- +Changes in Version 3.11.1 (2020/11/17) +-------------------------------------- Version 3.11.1 adds support for Python 3.9 and includes a number of bugfixes. Highlights include: @@ -1313,8 +1323,8 @@ in this release. .. _PyMongo 3.11.1 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=29997 -Changes in Version 3.11.0 -------------------------- +Changes in Version 3.11.0 (2020/07/30) +-------------------------------------- Version 3.11 adds support for MongoDB 4.4 and includes a number of bug fixes. Highlights include: @@ -1408,8 +1418,8 @@ in this release. .. _PyMongo 3.11.0 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=24799 -Changes in Version 3.10.1 -------------------------- +Changes in Version 3.10.1 (2020/01/07) +-------------------------------------- Version 3.10.1 fixes the following issues discovered since the release of 3.10.0: @@ -1427,8 +1437,8 @@ in this release. .. _PyMongo 3.10.1 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=25039 -Changes in Version 3.10.0 -------------------------- +Changes in Version 3.10.0 (2019/12/10) +-------------------------------------- Version 3.10 includes a number of improvements and bug fixes. Highlights include: @@ -1454,8 +1464,8 @@ in this release. .. _PyMongo 3.10 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=23944 -Changes in Version 3.9.0 ------------------------- +Changes in Version 3.9.0 (2019/08/13) +------------------------------------- Version 3.9 adds support for MongoDB 4.2. Highlights include: @@ -1558,8 +1568,8 @@ in this release. .. _PyMongo 3.9 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=21787 -Changes in Version 3.8.0 ------------------------- +Changes in Version 3.8.0 (2019/04/22) +------------------------------------- .. warning:: PyMongo no longer supports Python 2.6. 
RHEL 6 users should install Python 2.7 or newer from `Red Hat Software Collections @@ -1637,8 +1647,8 @@ in this release. .. _PyMongo 3.8 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=19904 -Changes in Version 3.7.2 ------------------------- +Changes in Version 3.7.2 (2018/10/10) +------------------------------------- Version 3.7.2 fixes a few issues discovered since the release of 3.7.1. @@ -1661,8 +1671,8 @@ in this release. .. _PyMongo 3.7.2 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=21519 -Changes in Version 3.7.1 ------------------------- +Changes in Version 3.7.1 (2018/07/16) +------------------------------------- Version 3.7.1 fixes a few issues discovered since the release of 3.7.0. @@ -1681,8 +1691,8 @@ in this release. .. _PyMongo 3.7.1 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=21096 -Changes in Version 3.7.0 ------------------------- +Changes in Version 3.7.0 (2018/06/26) +------------------------------------- Version 3.7 adds support for MongoDB 4.0. Highlights include: @@ -1791,8 +1801,8 @@ in this release. .. _PyMongo 3.7 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=19287 -Changes in Version 3.6.1 ------------------------- +Changes in Version 3.6.1 (2018/03/01) +------------------------------------- Version 3.6.1 fixes bugs reported since the release of 3.6.0: @@ -1817,8 +1827,8 @@ in this release. .. _PyMongo 3.6.1 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=19438 -Changes in Version 3.6.0 ------------------------- +Changes in Version 3.6.0 (2017/08/23) +------------------------------------- Version 3.6 adds support for MongoDB 3.6, drops support for CPython 3.3 (PyPy3 is still supported), and drops support for MongoDB versions older than 2.6. If @@ -1889,8 +1899,8 @@ in this release. .. _PyMongo 3.6 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=18043 -Changes in Version 3.5.1 ------------------------- +Changes in Version 3.5.1 (2017/08/23) +------------------------------------- Version 3.5.1 fixes bugs reported since the release of 3.5.0: @@ -1908,8 +1918,8 @@ in this release. .. _PyMongo 3.5.1 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=18721 -Changes in Version 3.5 ----------------------- +Changes in Version 3.5.0 (2017/08/08) +------------------------------------- Version 3.5 implements a number of improvements and bug fixes: @@ -1999,8 +2009,8 @@ in this release. .. _PyMongo 3.5 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=17590 -Changes in Version 3.4 ----------------------- +Changes in Version 3.4.0 (2016/11/29) +------------------------------------- Version 3.4 implements the new server features introduced in MongoDB 3.4 and a whole lot more: @@ -2071,8 +2081,8 @@ in this release. .. _PyMongo 3.4 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=16594 -Changes in Version 3.3.1 ------------------------- +Changes in Version 3.3.1 (2016/10/27) +------------------------------------- Version 3.3.1 fixes a memory leak when decoding elements inside of a :class:`~bson.raw_bson.RawBSONDocument`. @@ -2085,8 +2095,8 @@ in this release. .. 
_PyMongo 3.3.1 release notes in Jira: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=17636 -Changes in Version 3.3 ----------------------- +Changes in Version 3.3.0 (2016/07/12) +------------------------------------- Version 3.3 adds the following major new features: @@ -2112,8 +2122,8 @@ in this release. .. _PyMongo 3.3 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=16005 -Changes in Version 3.2.2 ------------------------- +Changes in Version 3.2.2 (2016/03/15) +------------------------------------- Version 3.2.2 fixes a few issues reported since the release of 3.2.1, including a fix for using the ``connect`` option in the MongoDB URI and support for setting @@ -2128,8 +2138,8 @@ in this release. .. _PyMongo 3.2.2 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=16538 -Changes in Version 3.2.1 ------------------------- +Changes in Version 3.2.1 (2016/02/02) +------------------------------------- Version 3.2.1 fixes a few issues reported since the release of 3.2, including running the mapreduce command twice when calling the @@ -2146,8 +2156,8 @@ in this release. .. _PyMongo 3.2.1 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=16312 -Changes in Version 3.2 ----------------------- +Changes in Version 3.2 (2015/12/07) +----------------------------------- Version 3.2 implements the new server features introduced in MongoDB 3.2. @@ -2179,8 +2189,8 @@ in this release. .. _PyMongo 3.2 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=15612 -Changes in Version 3.1.1 ------------------------- +Changes in Version 3.1.1 (2015/11/17) +------------------------------------- Version 3.1.1 fixes a few issues reported since the release of 3.1, including a regression in error handling for oversize command documents and interrupt @@ -2194,8 +2204,8 @@ in this release. .. _PyMongo 3.1.1 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=16211 -Changes in Version 3.1 ----------------------- +Changes in Version 3.1 (2015/11/02) +----------------------------------- Version 3.1 implements a few new features and fixes bugs reported since the release of 3.0.3. @@ -2226,8 +2236,8 @@ in this release. .. _PyMongo 3.1 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=14796 -Changes in Version 3.0.3 ------------------------- +Changes in Version 3.0.3 (2015/06/30) +------------------------------------- Version 3.0.3 fixes issues reported since the release of 3.0.2, including a feature breaking bug in the GSSAPI implementation. @@ -2240,8 +2250,8 @@ in this release. .. _PyMongo 3.0.3 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=15528 -Changes in Version 3.0.2 ------------------------- +Changes in Version 3.0.2 (2015/05/12) +------------------------------------- Version 3.0.2 fixes issues reported since the release of 3.0.1, most importantly a bug that could route operations to replica set members @@ -2258,8 +2268,8 @@ in this release. .. 
_PyMongo 3.0.2 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=15430 -Changes in Version 3.0.1 ------------------------- +Changes in Version 3.0.1 (2015/04/21) +------------------------------------- Version 3.0.1 fixes issues reported since the release of 3.0, most importantly a bug in GridFS.delete that could prevent file chunks from @@ -2273,8 +2283,8 @@ in this release. .. _PyMongo 3.0.1 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=15322 -Changes in Version 3.0 ----------------------- +Changes in Version 3.0 (2015/04/07) +----------------------------------- PyMongo 3.0 is a partial rewrite of PyMongo bringing a large number of improvements: @@ -2719,8 +2729,8 @@ in this release. .. _PyMongo 3.0 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=12501 -Changes in Version 2.9.5 ------------------------- +Changes in Version 2.9.5 (2017/06/30) +------------------------------------- Version 2.9.5 works around ssl module deprecations in Python 3.6, and expected future ssl module deprecations. It also fixes bugs found since the release of @@ -2742,8 +2752,8 @@ in this release. .. _PyMongo 2.9.5 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=17605 -Changes in Version 2.9.4 ------------------------- +Changes in Version 2.9.4 (2016/09/30) +------------------------------------- Version 2.9.4 fixes issues reported since the release of 2.9.3. @@ -2761,8 +2771,8 @@ in this release. .. _PyMongo 2.9.4 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=16885 -Changes in Version 2.9.3 ------------------------- +Changes in Version 2.9.3 (2016/03/15) +------------------------------------- Version 2.9.3 fixes a few issues reported since the release of 2.9.2 including thread safety issues in :meth:`~pymongo.collection.Collection.ensure_index`, @@ -2777,8 +2787,8 @@ in this release. .. _PyMongo 2.9.3 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=16539 -Changes in Version 2.9.2 ------------------------- +Changes in Version 2.9.2 (2016/02/16) +------------------------------------- Version 2.9.2 restores Python 3.1 support, which was broken in PyMongo 2.8. It improves an error message when decoding BSON as well as fixes a couple other @@ -2795,8 +2805,8 @@ in this release. .. _PyMongo 2.9.2 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=16303 -Changes in Version 2.9.1 ------------------------- +Changes in Version 2.9.1 (2015/11/17) +------------------------------------- Version 2.9.1 fixes two interrupt handling issues in the C extensions and adapts a test case for a behavior change in MongoDB 3.2. @@ -2809,8 +2819,8 @@ in this release. .. _PyMongo 2.9.1 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=16208 -Changes in Version 2.9 ----------------------- +Changes in Version 2.9 (2015/09/30) +----------------------------------- Version 2.9 provides an upgrade path to PyMongo 3.x. Most of the API changes from PyMongo 3.0 have been backported in a backward compatible way, allowing @@ -2887,8 +2897,8 @@ in this release. .. 
_PyMongo 2.9 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=14795 -Changes in Version 2.8.1 ------------------------- +Changes in Version 2.8.1 (2015/05/11) +------------------------------------- Version 2.8.1 fixes a number of issues reported since the release of PyMongo 2.8. It is a recommended upgrade for all users of PyMongo 2.x. @@ -2901,8 +2911,8 @@ in this release. .. _PyMongo 2.8.1 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=15324 -Changes in Version 2.8 ----------------------- +Changes in Version 2.8 (2015/01/28) +----------------------------------- Version 2.8 is a major release that provides full support for MongoDB 3.0 and fixes a number of bugs. @@ -2951,8 +2961,8 @@ in this release. .. _PyMongo 2.8 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=14223 -Changes in Version 2.7.2 ------------------------- +Changes in Version 2.7.2 (2014/07/29) +------------------------------------- Version 2.7.2 includes fixes for upsert reporting in the bulk API for MongoDB versions previous to 2.6, a regression in how son manipulators are applied in @@ -2968,8 +2978,8 @@ in this release. .. _PyMongo 2.7.2 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=14005 -Changes in Version 2.7.1 ------------------------- +Changes in Version 2.7.1 (2014/05/23) +------------------------------------- Version 2.7.1 fixes a number of issues reported since the release of 2.7, most importantly a fix for creating indexes and manipulating users through @@ -2983,8 +2993,8 @@ in this release. .. _PyMongo 2.7.1 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=13823 -Changes in Version 2.7 ----------------------- +Changes in Version 2.7 (2014/04/03) +----------------------------------- PyMongo 2.7 is a major release with a large number of new features and bug fixes. Highlights include: @@ -3020,8 +3030,8 @@ in this release. .. _PyMongo 2.7 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=12892 -Changes in Version 2.6.3 ------------------------- +Changes in Version 2.6.3 (2013/10/11) +------------------------------------- Version 2.6.3 fixes issues reported since the release of 2.6.2, most importantly a semaphore leak when a connection to the server fails. @@ -3034,8 +3044,8 @@ in this release. .. _PyMongo 2.6.3 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=13098 -Changes in Version 2.6.2 ------------------------- +Changes in Version 2.6.2 (2013/09/06) +------------------------------------- Version 2.6.2 fixes a :exc:`TypeError` problem when max_pool_size=None is used in Python 3. @@ -3048,8 +3058,8 @@ in this release. .. _PyMongo 2.6.2 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=12910 -Changes in Version 2.6.1 ------------------------- +Changes in Version 2.6.1 (2013/09/03) +------------------------------------- Version 2.6.1 fixes a reference leak in the :meth:`~pymongo.collection.Collection.insert` method. @@ -3062,8 +3072,8 @@ in this release. .. 
_PyMongo 2.6.1 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=12905 -Changes in Version 2.6 ----------------------- +Changes in Version 2.6 (2013/08/19) +----------------------------------- Version 2.6 includes some frequently requested improvements and adds support for some early MongoDB 2.6 features. @@ -3111,8 +3121,8 @@ in this release. .. _PyMongo 2.6 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=12380 -Changes in Version 2.5.2 ------------------------- +Changes in Version 2.5.2 (2013/06/01) +------------------------------------- Version 2.5.2 fixes a NULL pointer dereference issue when decoding an invalid :class:`~bson.dbref.DBRef`. @@ -3125,8 +3135,8 @@ in this release. .. _PyMongo 2.5.2 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=12581 -Changes in Version 2.5.1 ------------------------- +Changes in Version 2.5.1 (2013/05/13) +------------------------------------- Version 2.5.1 is a minor release that fixes issues discovered after the release of 2.5. Most importantly, this release addresses some race @@ -3140,8 +3150,8 @@ in this release. .. _PyMongo 2.5.1 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=12484 -Changes in Version 2.5 ----------------------- +Changes in Version 2.5 (2013/03/22) +----------------------------------- Version 2.5 includes changes to support new features in MongoDB 2.4. @@ -3164,8 +3174,8 @@ in this release. .. _PyMongo 2.5 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=11981 -Changes in Version 2.4.2 ------------------------- +Changes in Version 2.4.2 (2013/01/23) +------------------------------------- Version 2.4.2 is a minor release that fixes issues discovered after the release of 2.4.1. Most importantly, PyMongo will no longer select a replica @@ -3179,8 +3189,8 @@ in this release. .. _PyMongo 2.4.2 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=12299 -Changes in Version 2.4.1 ------------------------- +Changes in Version 2.4.1 (2012/12/06) +------------------------------------- Version 2.4.1 is a minor release that fixes issues discovered after the release of 2.4. Most importantly, this release fixes a regression using @@ -3195,8 +3205,8 @@ in this release. .. _PyMongo 2.4.1 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=12286 -Changes in Version 2.4 ----------------------- +Changes in Version 2.4 (2012/11/27) +----------------------------------- Version 2.4 includes a few important new features and a large number of bug fixes. @@ -3245,8 +3255,8 @@ in this release. .. _PyMongo 2.4 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=11485 -Changes in Version 2.3 ----------------------- +Changes in Version 2.3 (2012/08/29) +----------------------------------- Version 2.3 adds support for new features and behavior changes in MongoDB 2.2. @@ -3279,8 +3289,8 @@ in this release. .. _PyMongo 2.3 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=11146 -Changes in Version 2.2.1 ------------------------- +Changes in Version 2.2.1 (2012/07/06) +------------------------------------- Version 2.2.1 is a minor release that fixes issues discovered after the release of 2.2. 
Most importantly, this release fixes an incompatibility @@ -3295,8 +3305,8 @@ in this release. .. _PyMongo 2.2.1 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=11185 -Changes in Version 2.2 ----------------------- +Changes in Version 2.2 (2012/04/30) +----------------------------------- Version 2.2 adds a few more frequently requested features and fixes a number of bugs. @@ -3340,8 +3350,8 @@ in this release. .. _PyMongo 2.2 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=10584 -Changes in Version 2.1.1 ------------------------- +Changes in Version 2.1.1 (2012/01/04) +------------------------------------- Version 2.1.1 is a minor release that fixes a few issues discovered after the release of 2.1. You can now use @@ -3360,8 +3370,8 @@ in this release. .. _PyMongo 2.1.1 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?version=11081&styleName=Html&projectId=10004 -Changes in Version 2.1 ----------------------- +Changes in Version 2.1 (2011/12/07) +----------------------------------- Version 2.1 adds a few frequently requested features and includes the usual round of bug fixes and improvements. @@ -3403,8 +3413,8 @@ See the `PyMongo 2.1 release notes in JIRA`_ for the list of resolved issues in .. _PyMongo 2.1 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=10583 -Changes in Version 2.0.1 ------------------------- +Changes in Version 2.0.1 (2011/08/15) +------------------------------------- Version 2.0.1 fixes a regression in :class:`~gridfs.grid_file.GridIn` when writing pre-chunked strings. Thanks go to Alexey Borzenkov for reporting the @@ -3416,8 +3426,8 @@ Issues Resolved - `PYTHON-271 `_: Regression in GridFS leads to serious loss of data. -Changes in Version 2.0 ----------------------- +Changes in Version 2.0 (2011/08/05) +----------------------------------- Version 2.0 adds a large number of features and fixes a number of issues. @@ -3474,8 +3484,8 @@ See the `PyMongo 2.0 release notes in JIRA`_ for the list of resolved issues in .. _PyMongo 2.0 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=10274 -Changes in Version 1.11 ------------------------ +Changes in Version 1.11 (2011/05/05) +------------------------------------ Version 1.11 adds a few new features and fixes a few more bugs. @@ -3528,8 +3538,8 @@ Issues resolved - `PYTHON-138 `_: Find method for GridFS -Changes in Version 1.10.1 -------------------------- +Changes in Version 1.10.1 (2011/04/07) +-------------------------------------- Version 1.10.1 is primarily a bugfix release. It fixes a regression in version 1.10 that broke pickling of ObjectIds. A number of other bugs @@ -3576,8 +3586,8 @@ Issues resolved - `PYTHON-113 `_: Redunducy in MasterSlaveConnection -Changes in Version 1.10 ------------------------ +Changes in Version 1.10 (2011/03/30) +------------------------------------ Version 1.10 includes changes to support new features in MongoDB 1.8.x. Highlights include a modified map/reduce API including an inline map/reduce @@ -3617,8 +3627,8 @@ Issues resolved - PYTHON-166: Fixes a concurrency issue. - PYTHON-158: Add code and err string to ``db assertion`` messages. -Changes in Version 1.9 ----------------------- +Changes in Version 1.9 (2010/09/28) +----------------------------------- Version 1.9 adds a new package to the PyMongo distribution, :mod:`bson`. 
:mod:`bson` contains all of the `BSON @@ -3699,8 +3709,8 @@ rather than :class:`pymongo.errors.PyMongoError`. mode. - added :class:`uuid` support to :mod:`~bson.json_util`. -Changes in Version 1.8.1 ------------------------- +Changes in Version 1.8.1 (2010/08/13) +------------------------------------- - fixed a typo in the C extension that could cause safe-mode operations to report a failure (:class:`SystemError`) even when none @@ -3708,8 +3718,8 @@ Changes in Version 1.8.1 - added a :meth:`__ne__` implementation to any class where we define :meth:`__eq__`. -Changes in Version 1.8 ----------------------- +Changes in Version 1.8 (2010/08/05) +----------------------------------- Version 1.8 adds support for connecting to replica sets, specifying per-operation values for ``w`` and ``wtimeout``, and decoding to @@ -3749,8 +3759,8 @@ timezone-aware datetimes. :class:`~pymongo.errors.OperationFailure` exceptions. - fixed serialization of int and float subclasses in the C extension. -Changes in Version 1.7 ----------------------- +Changes in Version 1.7 (2010/06/17) +----------------------------------- Version 1.7 is a recommended upgrade for all PyMongo users. The full release notes are below, and some more in depth discussion of the @@ -3799,8 +3809,8 @@ highlights is `here - don't transparently map ``"filename"`` key to :attr:`name` attribute for GridFS. -Changes in Version 1.6 ----------------------- +Changes in Version 1.6 (2010/04/14) +----------------------------------- The biggest change in version 1.6 is a complete re-implementation of :mod:`gridfs` with a lot of improvements over the old @@ -3821,13 +3831,13 @@ to be modified before upgrading to 1.6. on non-existent collections. - disallow empty bulk inserts. -Changes in Version 1.5.2 ------------------------- +Changes in Version 1.5.2 (2010/03/31) +------------------------------------- - fixed response handling to ignore unknown response flags in queries. - handle server versions containing '-pre-'. -Changes in Version 1.5.1 ------------------------- +Changes in Version 1.5.1 (2010/03/17) +------------------------------------- - added :data:`~gridfs.grid_file.GridFile._id` property for :class:`~gridfs.grid_file.GridFile` instances. - fix for making a :class:`~pymongo.connection.Connection` (with @@ -3840,8 +3850,8 @@ Changes in Version 1.5.1 - improvements to Python code caching in C extension - should improve behavior on mod_wsgi. -Changes in Version 1.5 ----------------------- +Changes in Version 1.5 (2010/03/10) +----------------------------------- - added subtype constants to :mod:`~bson.binary` module. - DEPRECATED ``options`` argument to :meth:`~pymongo.collection.Collection` and @@ -3875,8 +3885,8 @@ Changes in Version 1.5 - added :class:`~gridfs.errors.GridFSError` as base class for :mod:`gridfs` exceptions. -Changes in Version 1.4 ----------------------- +Changes in Version 1.4 (2010/01/17) +----------------------------------- Perhaps the most important change in version 1.4 is that we have decided to **no longer support Python 2.3**. 
The most immediate reason @@ -3937,8 +3947,8 @@ Other changes: - allow the NULL byte in strings and disallow it in key names or regex patterns -Changes in Version 1.3 ----------------------- +Changes in Version 1.3 (2009/12/16) +----------------------------------- - DEPRECATED running :meth:`~pymongo.collection.Collection.group` as :meth:`~pymongo.database.Database.eval`, also changed default for :meth:`~pymongo.collection.Collection.group` to running as a command @@ -3963,8 +3973,8 @@ Changes in Version 1.3 usual, as it carries some performance implications. - added :meth:`~pymongo.connection.Connection.disconnect` -Changes in Version 1.2.1 ------------------------- +Changes in Version 1.2.1 (2009/12/10) +------------------------------------- - added :doc:`changelog` to docs - added ``setup.py doc --test`` to run doctests for tutorial, examples - moved most examples to Sphinx docs (and remove from *examples/* @@ -3975,8 +3985,8 @@ Changes in Version 1.2.1 characters - allow :class:`unicode` instances for :class:`~bson.objectid.ObjectId` init -Changes in Version 1.2 ----------------------- +Changes in Version 1.2 (2009/12/09) +----------------------------------- - ``spec`` parameter for :meth:`~pymongo.collection.Collection.remove` is now optional to allow for deleting all documents in a :class:`~pymongo.collection.Collection` @@ -4002,15 +4012,15 @@ Changes in Version 1.2 - some minor fixes for installation process - added support for datetime and regex in :mod:`~bson.json_util` -Changes in Version 1.1.2 ------------------------- +Changes in Version 1.1.2 (2009/11/23) +------------------------------------- - improvements to :meth:`~pymongo.collection.Collection.insert` speed (using C for insert message creation) - use random number for request_id - fix some race conditions with :class:`~pymongo.errors.AutoReconnect` -Changes in Version 1.1.1 ------------------------- +Changes in Version 1.1.1 (2009/11/14) +------------------------------------- - added ``multi`` parameter for :meth:`~pymongo.collection.Collection.update` - fix unicode regex patterns with C extension @@ -4023,8 +4033,8 @@ Changes in Version 1.1.1 to ``True`` due to performance regression - switch documentation to Sphinx -Changes in Version 1.1 ----------------------- +Changes in Version 1.1 (2009/10/21) +----------------------------------- - added :meth:`__hash__` for :class:`~bson.dbref.DBRef` and :class:`~bson.objectid.ObjectId` - bulk :meth:`~pymongo.collection.Collection.insert` works with any @@ -4039,8 +4049,8 @@ Changes in Version 1.1 - added ``safe`` parameter for :meth:`~pymongo.collection.Collection.remove` - added ``tailable`` parameter for :meth:`~pymongo.collection.Collection.find` -Changes in Version 1.0 ----------------------- +Changes in Version 1.0 (2009/09/30) +----------------------------------- - fixes for :class:`~pymongo.master_slave_connection.MasterSlaveConnection` - added ``finalize`` parameter for :meth:`~pymongo.collection.Collection.group` @@ -4050,17 +4060,17 @@ Changes in Version 1.0 :meth:`~pymongo.cursor.Cursor.__len__` for :class:`~pymongo.cursor.Cursor` instances -Changes in Version 0.16 ------------------------ +Changes in Version 0.16 (2009/09/16) +------------------------------------ - support for encoding/decoding :class:`uuid.UUID` instances - fix for :meth:`~pymongo.cursor.Cursor.explain` with limits -Changes in Version 0.15.2 -------------------------- +Changes in Version 0.15.2 (2009/09/09) +-------------------------------------- - documentation changes only -Changes in Version 
0.15.1 -------------------------- +Changes in Version 0.15.1 (2009/09/02) +-------------------------------------- - various performance improvements - API CHANGE no longer need to specify direction for :meth:`~pymongo.collection.Collection.create_index` and @@ -4069,8 +4079,8 @@ Changes in Version 0.15.1 - support for encoding :class:`tuple` instances as :class:`list` instances -Changes in Version 0.15 ------------------------ +Changes in Version 0.15 (2009/08/26) +------------------------------------ - fix string representation of :class:`~bson.objectid.ObjectId` instances - added ``timeout`` parameter for @@ -4078,25 +4088,25 @@ Changes in Version 0.15 - allow scope for ``reduce`` function in :meth:`~pymongo.collection.Collection.group` -Changes in Version 0.14.2 -------------------------- +Changes in Version 0.14.2 (2009/08/24) +-------------------------------------- - minor bugfixes -Changes in Version 0.14.1 -------------------------- +Changes in Version 0.14.1 (2009/08/21) +-------------------------------------- - :meth:`~gridfs.grid_file.GridFile.seek` and :meth:`~gridfs.grid_file.GridFile.tell` for (read mode) :class:`~gridfs.grid_file.GridFile` instances -Changes in Version 0.14 ------------------------ +Changes in Version 0.14 (2009/08/19) +------------------------------------ - support for long in :class:`~bson.BSON` - added :meth:`~pymongo.collection.Collection.rename` - added ``snapshot`` parameter for :meth:`~pymongo.collection.Collection.find` -Changes in Version 0.13 ------------------------ +Changes in Version 0.13 (2009/07/29) +------------------------------------ - better :class:`~pymongo.master_slave_connection.MasterSlaveConnection` support @@ -4106,38 +4116,38 @@ Changes in Version 0.13 - DEPRECATED passing an index name to :meth:`~pymongo.cursor.Cursor.hint` -Changes in Version 0.12 ------------------------ +Changes in Version 0.12 (2009/07/08) +------------------------------------ - improved :class:`~bson.objectid.ObjectId` generation - added :class:`~pymongo.errors.AutoReconnect` exception for when reconnection is possible - make :mod:`gridfs` thread-safe - fix for :mod:`gridfs` with non :class:`~bson.objectid.ObjectId` ``_id`` -Changes in Version 0.11.3 -------------------------- +Changes in Version 0.11.3 (2009/06/18) +-------------------------------------- - don't allow NULL bytes in string encoder - fixes for Python 2.3 -Changes in Version 0.11.2 -------------------------- +Changes in Version 0.11.2 (2009/06/08) +-------------------------------------- - PEP 8 - updates for :meth:`~pymongo.collection.Collection.group` - VS build -Changes in Version 0.11.1 -------------------------- +Changes in Version 0.11.1 (2009/06/04) +-------------------------------------- - fix for connection pooling under Python 2.5 -Changes in Version 0.11 ------------------------ +Changes in Version 0.11 (2009/06/03) +------------------------------------ - better build failure detection - driver support for selecting fields in sub-documents - disallow insertion of invalid key names - added ``timeout`` parameter for :meth:`~pymongo.connection.Connection` -Changes in Version 0.10.3 -------------------------- +Changes in Version 0.10.3 (2009/05/27) +-------------------------------------- - fix bug with large :meth:`~pymongo.cursor.Cursor.limit` - better exception when modules get reloaded out from underneath the C extension @@ -4145,22 +4155,22 @@ Changes in Version 0.10.3 :class:`~pymongo.collection.Collection` or :class:`~pymongo.database.Database` instance -Changes in Version 0.10.2 
-------------------------- +Changes in Version 0.10.2 (2009/05/22) +-------------------------------------- - support subclasses of :class:`dict` in C encoder -Changes in Version 0.10.1 -------------------------- +Changes in Version 0.10.1 (2009/05/18) +-------------------------------------- - alias :class:`~pymongo.connection.Connection` as :attr:`pymongo.Connection` - raise an exception rather than silently overflowing in encoder -Changes in Version 0.10 ------------------------ +Changes in Version 0.10 (2009/05/14) +------------------------------------ - added :meth:`~pymongo.collection.Collection.ensure_index` -Changes in Version 0.9.7 ------------------------- +Changes in Version 0.9.7 (2009/05/13) +------------------------------------- - allow sub-collections of *$cmd* as valid :class:`~pymongo.collection.Collection` names - add version as :attr:`pymongo.version` From 26a61c8c480cdaf4a0ff10c74d578c4b485081d0 Mon Sep 17 00:00:00 2001 From: Casey Clements Date: Wed, 23 Oct 2024 08:24:59 -0400 Subject: [PATCH 065/182] PYTHON-2926 Updated signature of Binary.from_vector to take a BinaryVector (#1963) --- bson/binary.py | 25 ++++++++++++++++++------- test/test_bson.py | 20 +++++++++++++++++++- 2 files changed, 37 insertions(+), 8 deletions(-) diff --git a/bson/binary.py b/bson/binary.py index 96b61b6dab..f03173a8ef 100644 --- a/bson/binary.py +++ b/bson/binary.py @@ -16,7 +16,7 @@ import struct from dataclasses import dataclass from enum import Enum -from typing import TYPE_CHECKING, Any, Sequence, Tuple, Type, Union +from typing import TYPE_CHECKING, Any, Optional, Sequence, Tuple, Type, Union from uuid import UUID """Tools for representing BSON binary data. @@ -400,24 +400,35 @@ def as_uuid(self, uuid_representation: int = UuidRepresentation.STANDARD) -> UUI @classmethod def from_vector( cls: Type[Binary], - vector: list[int, float], - dtype: BinaryVectorDtype, - padding: int = 0, + vector: Union[BinaryVector, list[int, float]], + dtype: Optional[BinaryVectorDtype] = None, + padding: Optional[int] = None, ) -> Binary: - """**(BETA)** Create a BSON :class:`~bson.binary.Binary` of Vector subtype from a list of Numbers. + """**(BETA)** Create a BSON :class:`~bson.binary.Binary` of Vector subtype. To interpret the representation of the numbers, a data type must be included. See :class:`~bson.binary.BinaryVectorDtype` for available types and descriptions. The dtype and padding are prepended to the binary data's value. - :param vector: List of values + :param vector: Either a List of values, or a :class:`~bson.binary.BinaryVector` dataclass. :param dtype: Data type of the values :param padding: For fractional bytes, number of bits to ignore at end of vector. :return: Binary packed data identified by dtype and padding. .. versionadded:: 4.10 """ + if isinstance(vector, BinaryVector): + if dtype or padding: + raise ValueError( + "The first argument, vector, has type BinaryVector. " + "dtype or padding cannot be separately defined, but were." 
+ ) + dtype = vector.dtype + padding = vector.padding + vector = vector.data # type: ignore + + padding = 0 if padding is None else padding if dtype == BinaryVectorDtype.INT8: # pack ints in [-128, 127] as signed int8 format_str = "b" if padding: @@ -432,7 +443,7 @@ def from_vector( raise NotImplementedError("%s not yet supported" % dtype) metadata = struct.pack(" BinaryVector: diff --git a/test/test_bson.py b/test/test_bson.py index 96aa897d19..5dc1377bcd 100644 --- a/test/test_bson.py +++ b/test/test_bson.py @@ -51,7 +51,13 @@ is_valid, json_util, ) -from bson.binary import USER_DEFINED_SUBTYPE, Binary, BinaryVectorDtype, UuidRepresentation +from bson.binary import ( + USER_DEFINED_SUBTYPE, + Binary, + BinaryVector, + BinaryVectorDtype, + UuidRepresentation, +) from bson.code import Code from bson.codec_options import CodecOptions, DatetimeConversion from bson.datetime_ms import _DATETIME_ERROR_SUGGESTION @@ -785,6 +791,18 @@ def test_vector(self): else: self.fail("Failed to raise an exception.") + # Test form of Binary.from_vector(BinaryVector) + + assert padded_vec == Binary.from_vector( + BinaryVector(list_vector, BinaryVectorDtype.PACKED_BIT, padding) + ) + assert binary_vector == Binary.from_vector( + BinaryVector(list_vector, BinaryVectorDtype.INT8) + ) + assert float_binary == Binary.from_vector( + BinaryVector(list_vector, BinaryVectorDtype.FLOAT32) + ) + def test_unicode_regex(self): """Tests we do not get a segfault for C extension on unicode RegExs. This had been happening. From 5141a7c5c0e038dae59fe1bebe0cb8c049f32abd Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Wed, 23 Oct 2024 09:32:35 -0500 Subject: [PATCH 066/182] PYTHON-4896 Use shrub.py for other hosts tests (#1962) --- .evergreen/config.yml | 288 +++++++++++++++----------- .evergreen/scripts/generate_config.py | 60 +++++- 2 files changed, 221 insertions(+), 127 deletions(-) diff --git a/.evergreen/config.yml b/.evergreen/config.yml index e357f02f2b..4868096e83 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -1830,15 +1830,6 @@ tasks: # and then run our test suite on the vm. export GCPOIDC_TEST_CMD="OIDC_ENV=gcp ./.evergreen/run-mongodb-oidc-test.sh" bash $DRIVERS_TOOLS/.evergreen/auth_oidc/gcp/run-driver-test.sh - - - name: "test-fips-standalone" - tags: ["fips"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "latest" - TOPOLOGY: "server" - - func: "run tests" # }}} - name: "coverage-report" tags: ["coverage"] @@ -2004,72 +1995,17 @@ axes: - id: platform display_name: OS values: - - id: macos - display_name: "macOS" - run_on: macos-14 - variables: - skip_EC2_auth_test: true - skip_ECS_auth_test: true - skip_web_identity_auth_test: true - # CSOT tests are unreliable on our slow macOS hosts. - SKIP_CSOT_TESTS: true - - id: macos-arm64 - display_name: "macOS Arm64" - run_on: macos-14-arm64 - variables: - skip_EC2_auth_test: true - skip_ECS_auth_test: true - skip_web_identity_auth_test: true - # CSOT tests are unreliable on our slow macOS hosts. 
- SKIP_CSOT_TESTS: true - - id: rhel7 - display_name: "RHEL 7.x" - run_on: rhel79-small - batchtime: 10080 # 7 days - id: rhel8 display_name: "RHEL 8.x" run_on: rhel8.8-small batchtime: 10080 # 7 days - - id: rhel9-fips - display_name: "RHEL 9 FIPS" - run_on: rhel92-fips - batchtime: 10080 # 7 days - - id: ubuntu-22.04 - display_name: "Ubuntu 22.04" - run_on: ubuntu2204-small - batchtime: 10080 # 7 days - - id: ubuntu-20.04 - display_name: "Ubuntu 20.04" - run_on: ubuntu2004-small - batchtime: 10080 # 7 days - - id: rhel8-zseries - display_name: "RHEL 8 (zSeries)" - run_on: rhel8-zseries-small - batchtime: 10080 # 7 days - variables: - SKIP_HATCH: true - - id: rhel8-power8 - display_name: "RHEL 8 (POWER8)" - run_on: rhel8-power-small - batchtime: 10080 # 7 days - variables: - SKIP_HATCH: true - - id: rhel8-arm64 - display_name: "RHEL 8 (ARM64)" - run_on: rhel82-arm64-small - batchtime: 10080 # 7 days - variables: - id: windows display_name: "Windows 64" run_on: windows-64-vsMulti-small batchtime: 10080 # 7 days - variables: - skip_ECS_auth_test: true - skip_EC2_auth_test: true - skip_web_identity_auth_test: true - venv_bin_dir: "Scripts" - # CSOT tests are unreliable on our slow Windows hosts. - SKIP_CSOT_TESTS: true + - id: macos + display_name: "macOS" + run_on: macos-14 # Test with authentication? - id: auth @@ -2147,30 +2083,6 @@ axes: variables: PYTHON_BINARY: "/opt/python/pypy3.10/bin/pypy3" - - id: python-version-windows - display_name: "Python" - values: - - id: "3.9" - display_name: "Python 3.9" - variables: - PYTHON_BINARY: "C:/python/Python39/python.exe" - - id: "3.10" - display_name: "Python 3.10" - variables: - PYTHON_BINARY: "C:/python/Python310/python.exe" - - id: "3.11" - display_name: "Python 3.11" - variables: - PYTHON_BINARY: "C:/python/Python311/python.exe" - - id: "3.12" - display_name: "Python 3.12" - variables: - PYTHON_BINARY: "C:/python/Python312/python.exe" - - id: "3.13" - display_name: "Python 3.13" - variables: - PYTHON_BINARY: "C:/python/Python313/python.exe" - buildvariants: # Server Tests. - name: test-rhel8-py3.9-auth-ssl-cov @@ -3889,37 +3801,175 @@ buildvariants: skip_web_identity_auth_test: "true" PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 -- matrix_name: "tests-fips" - matrix_spec: - platform: - - rhel9-fips - auth: "auth" - ssl: "ssl" - display_name: "${platform} ${auth} ${ssl}" +# Other hosts tests. +- name: openssl-1.0.2-rhel7-py3.9-auth-ssl tasks: - - "test-fips-standalone" - -# Test one server version with zSeries, POWER8, and ARM. 
-- matrix_name: "test-different-cpu-architectures" - matrix_spec: - platform: - - rhel8-zseries # Added in 5.0.8 (SERVER-44074) - - rhel8-power8 # Added in 4.2.7 (SERVER-44072) - - rhel8-arm64 # Added in 4.4.2 (SERVER-48282) - auth-ssl: "*" - display_name: "${platform} ${auth-ssl}" + - name: .5.0 .standalone + display_name: OpenSSL 1.0.2 RHEL7 py3.9 Auth SSL + run_on: + - rhel79-small + batchtime: 10080 + expansions: + SKIP_HATCH: "true" + AUTH: auth + SSL: ssl + PYTHON_BINARY: /opt/python/3.9/bin/python3 +- name: openssl-1.0.2-rhel7-py3.9-noauth-ssl tasks: - - ".6.0" - -- matrix_name: "tests-python-version-supports-openssl-102-test-ssl" - matrix_spec: - platform: rhel7 - # Python 3.10+ requires OpenSSL 1.1.1+ - python-version: ["3.9"] - auth-ssl: "*" - display_name: "OpenSSL 1.0.2 ${python-version} ${platform} ${auth-ssl}" + - name: .5.0 .standalone + display_name: OpenSSL 1.0.2 RHEL7 py3.9 NoAuth SSL + run_on: + - rhel79-small + batchtime: 10080 + expansions: + SKIP_HATCH: "true" + AUTH: noauth + SSL: ssl + PYTHON_BINARY: /opt/python/3.9/bin/python3 +- name: openssl-1.0.2-rhel7-py3.9-noauth-nossl + tasks: + - name: .5.0 .standalone + display_name: OpenSSL 1.0.2 RHEL7 py3.9 NoAuth NoSSL + run_on: + - rhel79-small + batchtime: 10080 + expansions: + SKIP_HATCH: "true" + AUTH: noauth + SSL: nossl + PYTHON_BINARY: /opt/python/3.9/bin/python3 +- name: other-hosts-rhel9-fips-auth-ssl + tasks: + - name: .6.0 .standalone + display_name: Other hosts RHEL9-FIPS Auth SSL + run_on: + - rhel92-fips + batchtime: 10080 + expansions: + SKIP_HATCH: "true" + AUTH: auth + SSL: ssl +- name: other-hosts-rhel9-fips-noauth-ssl + tasks: + - name: .6.0 .standalone + display_name: Other hosts RHEL9-FIPS NoAuth SSL + run_on: + - rhel92-fips + batchtime: 10080 + expansions: + SKIP_HATCH: "true" + AUTH: noauth + SSL: ssl +- name: other-hosts-rhel9-fips-noauth-nossl + tasks: + - name: .6.0 .standalone + display_name: Other hosts RHEL9-FIPS NoAuth NoSSL + run_on: + - rhel92-fips + batchtime: 10080 + expansions: + SKIP_HATCH: "true" + AUTH: noauth + SSL: nossl +- name: other-hosts-rhel8-zseries-auth-ssl + tasks: + - name: .6.0 .standalone + display_name: Other hosts RHEL8-zseries Auth SSL + run_on: + - rhel8-zseries-small + batchtime: 10080 + expansions: + SKIP_HATCH: "true" + AUTH: auth + SSL: ssl +- name: other-hosts-rhel8-zseries-noauth-ssl + tasks: + - name: .6.0 .standalone + display_name: Other hosts RHEL8-zseries NoAuth SSL + run_on: + - rhel8-zseries-small + batchtime: 10080 + expansions: + SKIP_HATCH: "true" + AUTH: noauth + SSL: ssl +- name: other-hosts-rhel8-zseries-noauth-nossl + tasks: + - name: .6.0 .standalone + display_name: Other hosts RHEL8-zseries NoAuth NoSSL + run_on: + - rhel8-zseries-small + batchtime: 10080 + expansions: + SKIP_HATCH: "true" + AUTH: noauth + SSL: nossl +- name: other-hosts-rhel8-power8-auth-ssl tasks: - - ".5.0" + - name: .6.0 .standalone + display_name: Other hosts RHEL8-POWER8 Auth SSL + run_on: + - rhel8-power-small + batchtime: 10080 + expansions: + SKIP_HATCH: "true" + AUTH: auth + SSL: ssl +- name: other-hosts-rhel8-power8-noauth-ssl + tasks: + - name: .6.0 .standalone + display_name: Other hosts RHEL8-POWER8 NoAuth SSL + run_on: + - rhel8-power-small + batchtime: 10080 + expansions: + SKIP_HATCH: "true" + AUTH: noauth + SSL: ssl +- name: other-hosts-rhel8-power8-noauth-nossl + tasks: + - name: .6.0 .standalone + display_name: Other hosts RHEL8-POWER8 NoAuth NoSSL + run_on: + - rhel8-power-small + batchtime: 10080 + expansions: + SKIP_HATCH: "true" + AUTH: noauth + SSL: 
nossl +- name: other-hosts-rhel8-arm64-auth-ssl + tasks: + - name: .6.0 .standalone + display_name: Other hosts RHEL8-arm64 Auth SSL + run_on: + - rhel82-arm64-small + batchtime: 10080 + expansions: + SKIP_HATCH: "true" + AUTH: auth + SSL: ssl +- name: other-hosts-rhel8-arm64-noauth-ssl + tasks: + - name: .6.0 .standalone + display_name: Other hosts RHEL8-arm64 NoAuth SSL + run_on: + - rhel82-arm64-small + batchtime: 10080 + expansions: + SKIP_HATCH: "true" + AUTH: noauth + SSL: ssl +- name: other-hosts-rhel8-arm64-noauth-nossl + tasks: + - name: .6.0 .standalone + display_name: Other hosts RHEL8-arm64 NoAuth NoSSL + run_on: + - rhel82-arm64-small + batchtime: 10080 + expansions: + SKIP_HATCH: "true" + AUTH: noauth + SSL: nossl - matrix_name: "test-search-index-helpers" matrix_spec: diff --git a/.evergreen/scripts/generate_config.py b/.evergreen/scripts/generate_config.py index 3f1ea724ed..1217c26885 100644 --- a/.evergreen/scripts/generate_config.py +++ b/.evergreen/scripts/generate_config.py @@ -49,6 +49,7 @@ class Host: display_name: str +# Hosts with toolchains. HOSTS["rhel8"] = Host("rhel8", "rhel87-small", "RHEL8") HOSTS["win64"] = Host("win64", "windows-64-vsMulti-small", "Win64") HOSTS["win32"] = Host("win32", "windows-64-vsMulti-small", "Win32") @@ -56,7 +57,7 @@ class Host: HOSTS["macos-arm64"] = Host("macos-arm64", "macos-14-arm64", "macOS Arm64") HOSTS["ubuntu20"] = Host("ubuntu20", "ubuntu2004-small", "Ubuntu-20") HOSTS["ubuntu22"] = Host("ubuntu22", "ubuntu2204-small", "Ubuntu-22") - +HOSTS["rhel7"] = Host("rhel7", "rhel79-small", "RHEL7") ############## # Helpers @@ -76,8 +77,11 @@ def create_variant( task_refs = [EvgTaskRef(name=n) for n in task_names] kwargs.setdefault("expansions", dict()) expansions = kwargs.pop("expansions", dict()).copy() - host = host or "rhel8" - run_on = [HOSTS[host].run_on] + if "run_on" in kwargs: + run_on = kwargs.pop("run_on") + else: + host = host or "rhel8" + run_on = [HOSTS[host].run_on] name = display_name.replace(" ", "-").lower() if python: expansions["PYTHON_BINARY"] = get_python_binary(python, host) @@ -104,7 +108,7 @@ def get_python_binary(python: str, host: str) -> str: python = python.replace(".", "") return f"{base}/Python{python}/python.exe" - if host in ["rhel8", "ubuntu22", "ubuntu20"]: + if host in ["rhel8", "ubuntu22", "ubuntu20", "rhel7"]: return f"/opt/python/{python}/bin/python3" if host in ["macos", "macos-arm64"]: @@ -131,9 +135,11 @@ def get_versions_until(max_version: str) -> list[str]: return versions -def get_display_name(base: str, host: str, **kwargs) -> str: +def get_display_name(base: str, host: str | None = None, **kwargs) -> str: """Get the display name of a variant.""" - display_name = f"{base} {HOSTS[host].display_name}" + display_name = base + if host is not None: + display_name += f" {HOSTS[host].display_name}" version = kwargs.pop("VERSION", None) if version: if version not in ["rapid", "latest"]: @@ -640,10 +646,48 @@ def generate_aws_auth_variants(): return variants +def generate_alternative_hosts_variants(): + base_expansions = dict(SKIP_HATCH="true") + batchtime = BATCHTIME_WEEK + variants = [] + + host = "rhel7" + for auth, ssl in AUTH_SSLS: + expansions = base_expansions.copy() + expansions["AUTH"] = auth + expansions["SSL"] = ssl + variants.append( + create_variant( + [".5.0 .standalone"], + get_display_name("OpenSSL 1.0.2", "rhel7", python=CPYTHONS[0], **expansions), + host=host, + python=CPYTHONS[0], + batchtime=batchtime, + expansions=expansions, + ) + ) + + hosts = ["rhel92-fips", 
"rhel8-zseries-small", "rhel8-power-small", "rhel82-arm64-small"] + host_names = ["RHEL9-FIPS", "RHEL8-zseries", "RHEL8-POWER8", "RHEL8-arm64"] + for (host, host_name), (auth, ssl) in product(zip(hosts, host_names), AUTH_SSLS): + expansions = base_expansions.copy() + expansions["AUTH"] = auth + expansions["SSL"] = ssl + variants.append( + create_variant( + [".6.0 .standalone"], + display_name=get_display_name(f"Other hosts {host_name}", **expansions), + expansions=expansions, + batchtime=batchtime, + run_on=[host], + ) + ) + return variants + + ################## # Generate Config ################## -variants = create_server_variants() -# print(len(variants)) +variants = generate_alternative_hosts_variants() generate_yaml(variants=variants) From 79ad2a14811ecb8d22d47ad5c2b7e4eb6e8de943 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Wed, 23 Oct 2024 10:10:32 -0500 Subject: [PATCH 067/182] PYTHON-4900 Convert remaining matrix definitions to use shrub.py (#1964) --- .evergreen/config.yml | 190 ++++++++------------------ .evergreen/scripts/generate_config.py | 69 +++++++++- 2 files changed, 127 insertions(+), 132 deletions(-) diff --git a/.evergreen/config.yml b/.evergreen/config.yml index 4868096e83..2d73e19a27 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -1990,99 +1990,6 @@ tasks: - mongo-python-driver - ${github_commit} -axes: - # Choice of distro - - id: platform - display_name: OS - values: - - id: rhel8 - display_name: "RHEL 8.x" - run_on: rhel8.8-small - batchtime: 10080 # 7 days - - id: windows - display_name: "Windows 64" - run_on: windows-64-vsMulti-small - batchtime: 10080 # 7 days - - id: macos - display_name: "macOS" - run_on: macos-14 - - # Test with authentication? - - id: auth - display_name: Authentication - values: - - id: auth - display_name: Auth - variables: - AUTH: "auth" - - id: noauth - display_name: NoAuth - variables: - AUTH: "noauth" - - # Test with SSL? - - id: ssl - display_name: SSL - values: - - id: ssl - display_name: SSL - variables: - SSL: "ssl" - - id: nossl - display_name: NoSSL - variables: - SSL: "nossl" - - # Test with Auth + SSL (combined for convenience)? - - id: auth-ssl - display_name: Auth SSL - values: - - id: auth-ssl - display_name: Auth SSL - variables: - AUTH: "auth" - SSL: "ssl" - - id: noauth-nossl - display_name: NoAuth NoSSL - variables: - AUTH: "noauth" - SSL: "nossl" - - # Choice of Python runtime version - - id: python-version - display_name: "Python" - values: - # Note: always display platform with python-version to avoid ambiguous display names. - # Linux - - id: "3.9" - display_name: "Python 3.9" - variables: - PYTHON_BINARY: "/opt/python/3.9/bin/python3" - - id: "3.10" - display_name: "Python 3.10" - variables: - PYTHON_BINARY: "/opt/python/3.10/bin/python3" - - id: "3.11" - display_name: "Python 3.11" - variables: - PYTHON_BINARY: "/opt/python/3.11/bin/python3" - - id: "3.12" - display_name: "Python 3.12" - variables: - PYTHON_BINARY: "/opt/python/3.12/bin/python3" - - id: "3.13" - display_name: "Python 3.13" - variables: - PYTHON_BINARY: "/opt/python/3.13/bin/python3" - - id: "pypy3.9" - display_name: "PyPy 3.9" - variables: - PYTHON_BINARY: "/opt/python/pypy3.9/bin/pypy3" - - id: "pypy3.10" - display_name: "PyPy 3.10" - variables: - PYTHON_BINARY: "/opt/python/pypy3.10/bin/pypy3" - buildvariants: # Server Tests. 
- name: test-rhel8-py3.9-auth-ssl-cov @@ -3970,30 +3877,67 @@ buildvariants: SKIP_HATCH: "true" AUTH: noauth SSL: nossl - -- matrix_name: "test-search-index-helpers" - matrix_spec: - platform: rhel8 - python-version: "3.9" - display_name: "Search Index Helpers ${platform}" +- name: oidc-auth-rhel8 tasks: - - name: "test_atlas_task_group_search_indexes" - -- matrix_name: "mockupdb-tests" - matrix_spec: - platform: rhel8 - python-version: 3.9 - display_name: "MockupDB Tests" + - name: testoidc_task_group + display_name: OIDC Auth RHEL8 + run_on: + - rhel87-small + batchtime: 20160 +- name: oidc-auth-macos tasks: - - name: "mockupdb" - -- matrix_name: "tests-doctests" - matrix_spec: - platform: rhel8 - python-version: ["3.9"] - display_name: "Doctests ${python-version} ${platform}" + - name: testoidc_task_group + display_name: OIDC Auth macOS + run_on: + - macos-14 + batchtime: 20160 +- name: oidc-auth-win64 tasks: - - name: "doctests" + - name: testoidc_task_group + display_name: OIDC Auth Win64 + run_on: + - windows-64-vsMulti-small + batchtime: 20160 +- name: atlas-connect-rhel8-py3.9 + tasks: + - name: atlas-connect + display_name: Atlas connect RHEL8 py3.9 + run_on: + - rhel87-small + expansions: + PYTHON_BINARY: /opt/python/3.9/bin/python3 +- name: atlas-connect-rhel8-py3.13 + tasks: + - name: atlas-connect + display_name: Atlas connect RHEL8 py3.13 + run_on: + - rhel87-small + expansions: + PYTHON_BINARY: /opt/python/3.13/bin/python3 +- name: doctests-rhel8-py3.9 + tasks: + - name: doctests + display_name: Doctests RHEL8 py3.9 + run_on: + - rhel87-small + expansions: + PYTHON_BINARY: /opt/python/3.9/bin/python3 +- name: mockupdb-tests-rhel8-py3.9 + tasks: + - name: mockupdb + display_name: MockupDB Tests RHEL8 py3.9 + run_on: + - rhel87-small + expansions: + PYTHON_BINARY: /opt/python/3.9/bin/python3 +- name: search-index-helpers-rhel8-py3.9 + tasks: + - name: test_atlas_task_group_search_indexes + display_name: Search Index Helpers RHEL8 py3.9 + run_on: + - rhel87-small + expansions: + PYTHON_BINARY: /opt/python/3.9/bin/python3 - name: "no-server" display_name: "No server test" @@ -4009,15 +3953,7 @@ buildvariants: tasks: - name: "coverage-report" -- matrix_name: "atlas-connect" - matrix_spec: - platform: rhel8 - python-version: "*" - display_name: "Atlas connect ${python-version} ${platform}" - tasks: - - name: "atlas-connect" - -# OCSP test matrix. +# OCSP tests. 
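Each explicit variant added above corresponds to a shrub.py BuildVariant constructed by the generator script. A minimal sketch of the object behind the mockupdb-tests-rhel8-py3.9 variant -- the shrub.v3 module paths are assumed from shrub.py's v3 layout, while the field values are copied from the generated YAML above:

from shrub.v3.evg_build_variant import BuildVariant
from shrub.v3.evg_task import EvgTaskRef

# Field values mirror the generated YAML block for this variant.
variant = BuildVariant(
    name="mockupdb-tests-rhel8-py3.9",
    display_name="MockupDB Tests RHEL8 py3.9",
    run_on=["rhel87-small"],
    tasks=[EvgTaskRef(name="mockupdb")],
    expansions={"PYTHON_BINARY": "/opt/python/3.9/bin/python3"},
)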
- name: ocsp-test-rhel8-v4.4-py3.9 tasks: - name: .ocsp @@ -4359,14 +4295,6 @@ buildvariants: test_loadbalancer: "true" PYTHON_BINARY: /opt/python/3.9/bin/python3 -- matrix_name: "oidc-auth-test" - matrix_spec: - platform: [ rhel8, macos, windows ] - display_name: "OIDC Auth ${platform}" - tasks: - - name: testoidc_task_group - batchtime: 20160 # 14 days - - name: testazureoidc-variant display_name: "OIDC Auth Azure" run_on: ubuntu2204-small diff --git a/.evergreen/scripts/generate_config.py b/.evergreen/scripts/generate_config.py index 1217c26885..7adeac82e0 100644 --- a/.evergreen/scripts/generate_config.py +++ b/.evergreen/scripts/generate_config.py @@ -59,6 +59,7 @@ class Host: HOSTS["ubuntu22"] = Host("ubuntu22", "ubuntu2204-small", "Ubuntu-22") HOSTS["rhel7"] = Host("rhel7", "rhel79-small", "RHEL7") + ############## # Helpers ############## @@ -616,6 +617,72 @@ def generate_serverless_variants(): ] +def generate_oidc_auth_variants(): + variants = [] + for host in ["rhel8", "macos", "win64"]: + variants.append( + create_variant( + ["testoidc_task_group"], + get_display_name("OIDC Auth", host), + host=host, + batchtime=BATCHTIME_WEEK * 2, + ) + ) + return variants + + +def generate_search_index_variants(): + host = "rhel8" + python = CPYTHONS[0] + return [ + create_variant( + ["test_atlas_task_group_search_indexes"], + get_display_name("Search Index Helpers", host, python=python), + python=python, + host=host, + ) + ] + + +def generate_mockupdb_variants(): + host = "rhel8" + python = CPYTHONS[0] + return [ + create_variant( + ["mockupdb"], + get_display_name("MockupDB Tests", host, python=python), + python=python, + host=host, + ) + ] + + +def generate_doctests_variants(): + host = "rhel8" + python = CPYTHONS[0] + return [ + create_variant( + ["doctests"], + get_display_name("Doctests", host, python=python), + python=python, + host=host, + ) + ] + + +def generate_atlas_connect_variants(): + host = "rhel8" + return [ + create_variant( + ["atlas-connect"], + get_display_name("Atlas connect", host, python=python), + python=python, + host=host, + ) + for python in MIN_MAX_PYTHON + ] + + def generate_aws_auth_variants(): variants = [] tasks = [ @@ -689,5 +756,5 @@ def generate_alternative_hosts_variants(): # Generate Config ################## -variants = generate_alternative_hosts_variants() +variants = generate_search_index_variants() generate_yaml(variants=variants) From 493c331bb83e6c78bc2fb470ec0f5bd06f939a02 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Wed, 23 Oct 2024 11:08:50 -0500 Subject: [PATCH 068/182] PYTHON-4897 Remove Assign PR Reviewer from PyMongo (#1960) --- .evergreen/config.yml | 23 ----------------------- 1 file changed, 23 deletions(-) diff --git a/.evergreen/config.yml b/.evergreen/config.yml index 2d73e19a27..17d12742a6 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -1945,23 +1945,6 @@ tasks: - func: "attach benchmark test results" - func: "send dashboard data" - - name: "assign-pr-reviewer" - tags: ["pr"] - allowed_requesters: ["patch", "github_pr"] - commands: - - command: shell.exec - type: test - params: - shell: "bash" - working_dir: src - script: | - . 
.evergreen/scripts/env.sh - set -x - export CONFIG=$PROJECT_DIRECTORY/.github/reviewers.txt - export SCRIPT="$DRIVERS_TOOLS/.evergreen/github_app/assign-reviewer.sh" - bash $SCRIPT -p $CONFIG -h ${github_commit} -o "mongodb" -n "mongo-python-driver" - echo '{"results": [{ "status": "PASS", "test_file": "Build", "log_raw": "Test completed" } ]}' > ${PROJECT_DIRECTORY}/test-results.json - - name: "check-import-time" tags: ["pr"] commands: @@ -4332,12 +4315,6 @@ buildvariants: tasks: - name: test_aws_lambda_task_group -- name: rhel8-pr-assign-reviewer - display_name: Assign PR Reviewer - run_on: rhel87-small - tasks: - - name: "assign-pr-reviewer" - - name: rhel8-import-time display_name: Import Time Check run_on: rhel87-small From cb8cf03eb52c5a84616faf3112d36e0ceb832ab5 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Wed, 23 Oct 2024 13:29:09 -0500 Subject: [PATCH 069/182] PYTHON-4901 Move generated Evergreen variants to an included file (#1965) --- .evergreen/config.yml | 2293 +-------------------- .evergreen/generated_configs/variants.yml | 2204 ++++++++++++++++++++ .evergreen/scripts/generate_config.py | 69 +- 3 files changed, 2258 insertions(+), 2308 deletions(-) create mode 100644 .evergreen/generated_configs/variants.yml diff --git a/.evergreen/config.yml b/.evergreen/config.yml index 17d12742a6..6e48a380d3 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -25,6 +25,9 @@ timeout: script: | ls -la +include: + - filename: .evergreen/generated_configs/variants.yml + functions: "fetch source": # Executes clone and applies the submitted patch, if any @@ -1974,1954 +1977,6 @@ tasks: - ${github_commit} buildvariants: -# Server Tests. -- name: test-rhel8-py3.9-auth-ssl-cov - tasks: - - name: .standalone - - name: .replica_set - - name: .sharded_cluster - display_name: Test RHEL8 py3.9 Auth SSL cov - run_on: - - rhel87-small - expansions: - AUTH: auth - SSL: ssl - COVERAGE: coverage - PYTHON_BINARY: /opt/python/3.9/bin/python3 - tags: [coverage_tag] -- name: test-rhel8-py3.9-noauth-ssl-cov - tasks: - - name: .standalone - - name: .replica_set - - name: .sharded_cluster - display_name: Test RHEL8 py3.9 NoAuth SSL cov - run_on: - - rhel87-small - expansions: - AUTH: noauth - SSL: ssl - COVERAGE: coverage - PYTHON_BINARY: /opt/python/3.9/bin/python3 - tags: [coverage_tag] -- name: test-rhel8-py3.9-noauth-nossl-cov - tasks: - - name: .standalone - - name: .replica_set - - name: .sharded_cluster - display_name: Test RHEL8 py3.9 NoAuth NoSSL cov - run_on: - - rhel87-small - expansions: - AUTH: noauth - SSL: nossl - COVERAGE: coverage - PYTHON_BINARY: /opt/python/3.9/bin/python3 - tags: [coverage_tag] -- name: test-rhel8-py3.13-auth-ssl-cov - tasks: - - name: .standalone - - name: .replica_set - - name: .sharded_cluster - display_name: Test RHEL8 py3.13 Auth SSL cov - run_on: - - rhel87-small - expansions: - AUTH: auth - SSL: ssl - COVERAGE: coverage - PYTHON_BINARY: /opt/python/3.13/bin/python3 - tags: [coverage_tag] -- name: test-rhel8-py3.13-noauth-ssl-cov - tasks: - - name: .standalone - - name: .replica_set - - name: .sharded_cluster - display_name: Test RHEL8 py3.13 NoAuth SSL cov - run_on: - - rhel87-small - expansions: - AUTH: noauth - SSL: ssl - COVERAGE: coverage - PYTHON_BINARY: /opt/python/3.13/bin/python3 - tags: [coverage_tag] -- name: test-rhel8-py3.13-noauth-nossl-cov - tasks: - - name: .standalone - - name: .replica_set - - name: .sharded_cluster - display_name: Test RHEL8 py3.13 NoAuth NoSSL cov - run_on: - - rhel87-small - expansions: - AUTH: noauth - SSL: 
nossl - COVERAGE: coverage - PYTHON_BINARY: /opt/python/3.13/bin/python3 - tags: [coverage_tag] -- name: test-rhel8-pypy3.10-auth-ssl-cov - tasks: - - name: .standalone - - name: .replica_set - - name: .sharded_cluster - display_name: Test RHEL8 pypy3.10 Auth SSL cov - run_on: - - rhel87-small - expansions: - AUTH: auth - SSL: ssl - COVERAGE: coverage - PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 - tags: [coverage_tag] -- name: test-rhel8-pypy3.10-noauth-ssl-cov - tasks: - - name: .standalone - - name: .replica_set - - name: .sharded_cluster - display_name: Test RHEL8 pypy3.10 NoAuth SSL cov - run_on: - - rhel87-small - expansions: - AUTH: noauth - SSL: ssl - COVERAGE: coverage - PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 - tags: [coverage_tag] -- name: test-rhel8-pypy3.10-noauth-nossl-cov - tasks: - - name: .standalone - - name: .replica_set - - name: .sharded_cluster - display_name: Test RHEL8 pypy3.10 NoAuth NoSSL cov - run_on: - - rhel87-small - expansions: - AUTH: noauth - SSL: nossl - COVERAGE: coverage - PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 - tags: [coverage_tag] -- name: test-rhel8-py3.10-auth-ssl - tasks: - - name: .standalone - display_name: Test RHEL8 py3.10 Auth SSL - run_on: - - rhel87-small - expansions: - AUTH: auth - SSL: ssl - PYTHON_BINARY: /opt/python/3.10/bin/python3 -- name: test-rhel8-py3.11-noauth-ssl - tasks: - - name: .replica_set - display_name: Test RHEL8 py3.11 NoAuth SSL - run_on: - - rhel87-small - expansions: - AUTH: noauth - SSL: ssl - PYTHON_BINARY: /opt/python/3.11/bin/python3 -- name: test-rhel8-py3.12-noauth-nossl - tasks: - - name: .sharded_cluster - display_name: Test RHEL8 py3.12 NoAuth NoSSL - run_on: - - rhel87-small - expansions: - AUTH: noauth - SSL: nossl - PYTHON_BINARY: /opt/python/3.12/bin/python3 -- name: test-rhel8-pypy3.9-auth-ssl - tasks: - - name: .standalone - display_name: Test RHEL8 pypy3.9 Auth SSL - run_on: - - rhel87-small - expansions: - AUTH: auth - SSL: ssl - PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 -- name: test-macos-py3.9-auth-ssl-sync - tasks: - - name: .standalone - display_name: Test macOS py3.9 Auth SSL Sync - run_on: - - macos-14 - expansions: - AUTH: auth - SSL: ssl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 -- name: test-macos-py3.9-noauth-ssl-sync - tasks: - - name: .standalone - display_name: Test macOS py3.9 NoAuth SSL Sync - run_on: - - macos-14 - expansions: - AUTH: noauth - SSL: ssl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 -- name: test-macos-py3.9-noauth-nossl-sync - tasks: - - name: .standalone - display_name: Test macOS py3.9 NoAuth NoSSL Sync - run_on: - - macos-14 - expansions: - AUTH: noauth - SSL: nossl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 -- name: test-macos-py3.9-auth-ssl-async - tasks: - - name: .standalone - display_name: Test macOS py3.9 Auth SSL Async - run_on: - - macos-14 - expansions: - AUTH: auth - SSL: ssl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 -- name: test-macos-py3.9-noauth-ssl-async - tasks: - - name: .standalone - display_name: Test macOS py3.9 NoAuth SSL Async - run_on: - - macos-14 - expansions: - AUTH: noauth - SSL: ssl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: 
/Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 -- name: test-macos-py3.9-noauth-nossl-async - tasks: - - name: .standalone - display_name: Test macOS py3.9 NoAuth NoSSL Async - run_on: - - macos-14 - expansions: - AUTH: noauth - SSL: nossl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 -- name: test-macos-py3.13-auth-ssl-sync - tasks: - - name: .sharded_cluster - display_name: Test macOS py3.13 Auth SSL Sync - run_on: - - macos-14 - expansions: - AUTH: auth - SSL: ssl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 -- name: test-macos-py3.13-noauth-ssl-sync - tasks: - - name: .sharded_cluster - display_name: Test macOS py3.13 NoAuth SSL Sync - run_on: - - macos-14 - expansions: - AUTH: noauth - SSL: ssl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 -- name: test-macos-py3.13-noauth-nossl-sync - tasks: - - name: .sharded_cluster - display_name: Test macOS py3.13 NoAuth NoSSL Sync - run_on: - - macos-14 - expansions: - AUTH: noauth - SSL: nossl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 -- name: test-macos-py3.13-auth-ssl-async - tasks: - - name: .sharded_cluster - display_name: Test macOS py3.13 Auth SSL Async - run_on: - - macos-14 - expansions: - AUTH: auth - SSL: ssl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 -- name: test-macos-py3.13-noauth-ssl-async - tasks: - - name: .sharded_cluster - display_name: Test macOS py3.13 NoAuth SSL Async - run_on: - - macos-14 - expansions: - AUTH: noauth - SSL: ssl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 -- name: test-macos-py3.13-noauth-nossl-async - tasks: - - name: .sharded_cluster - display_name: Test macOS py3.13 NoAuth NoSSL Async - run_on: - - macos-14 - expansions: - AUTH: noauth - SSL: nossl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 -- name: test-macos-arm64-py3.9-auth-ssl-sync - tasks: - - name: .standalone .6.0 - - name: .standalone .7.0 - - name: .standalone .8.0 - - name: .standalone .rapid - - name: .standalone .latest - display_name: Test macOS Arm64 py3.9 Auth SSL Sync - run_on: - - macos-14-arm64 - expansions: - AUTH: auth - SSL: ssl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 -- name: test-macos-arm64-py3.9-noauth-ssl-sync - tasks: - - name: .standalone .6.0 - - name: .standalone .7.0 - - name: .standalone .8.0 - - name: .standalone .rapid - - name: .standalone .latest - display_name: Test macOS Arm64 py3.9 NoAuth SSL Sync - run_on: - - macos-14-arm64 - expansions: - AUTH: noauth - SSL: ssl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 -- name: test-macos-arm64-py3.9-noauth-nossl-sync - tasks: - - name: .standalone .6.0 - - name: .standalone .7.0 - - name: .standalone .8.0 - - name: .standalone .rapid - - name: .standalone .latest - display_name: Test macOS Arm64 py3.9 NoAuth NoSSL Sync - run_on: - - macos-14-arm64 - expansions: - AUTH: noauth - SSL: 
nossl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 -- name: test-macos-arm64-py3.9-auth-ssl-async - tasks: - - name: .standalone .6.0 - - name: .standalone .7.0 - - name: .standalone .8.0 - - name: .standalone .rapid - - name: .standalone .latest - display_name: Test macOS Arm64 py3.9 Auth SSL Async - run_on: - - macos-14-arm64 - expansions: - AUTH: auth - SSL: ssl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 -- name: test-macos-arm64-py3.9-noauth-ssl-async - tasks: - - name: .standalone .6.0 - - name: .standalone .7.0 - - name: .standalone .8.0 - - name: .standalone .rapid - - name: .standalone .latest - display_name: Test macOS Arm64 py3.9 NoAuth SSL Async - run_on: - - macos-14-arm64 - expansions: - AUTH: noauth - SSL: ssl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 -- name: test-macos-arm64-py3.9-noauth-nossl-async - tasks: - - name: .standalone .6.0 - - name: .standalone .7.0 - - name: .standalone .8.0 - - name: .standalone .rapid - - name: .standalone .latest - display_name: Test macOS Arm64 py3.9 NoAuth NoSSL Async - run_on: - - macos-14-arm64 - expansions: - AUTH: noauth - SSL: nossl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 -- name: test-macos-arm64-py3.13-auth-ssl-sync - tasks: - - name: .sharded_cluster .6.0 - - name: .sharded_cluster .7.0 - - name: .sharded_cluster .8.0 - - name: .sharded_cluster .rapid - - name: .sharded_cluster .latest - display_name: Test macOS Arm64 py3.13 Auth SSL Sync - run_on: - - macos-14-arm64 - expansions: - AUTH: auth - SSL: ssl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 -- name: test-macos-arm64-py3.13-noauth-ssl-sync - tasks: - - name: .sharded_cluster .6.0 - - name: .sharded_cluster .7.0 - - name: .sharded_cluster .8.0 - - name: .sharded_cluster .rapid - - name: .sharded_cluster .latest - display_name: Test macOS Arm64 py3.13 NoAuth SSL Sync - run_on: - - macos-14-arm64 - expansions: - AUTH: noauth - SSL: ssl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 -- name: test-macos-arm64-py3.13-noauth-nossl-sync - tasks: - - name: .sharded_cluster .6.0 - - name: .sharded_cluster .7.0 - - name: .sharded_cluster .8.0 - - name: .sharded_cluster .rapid - - name: .sharded_cluster .latest - display_name: Test macOS Arm64 py3.13 NoAuth NoSSL Sync - run_on: - - macos-14-arm64 - expansions: - AUTH: noauth - SSL: nossl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 -- name: test-macos-arm64-py3.13-auth-ssl-async - tasks: - - name: .sharded_cluster .6.0 - - name: .sharded_cluster .7.0 - - name: .sharded_cluster .8.0 - - name: .sharded_cluster .rapid - - name: .sharded_cluster .latest - display_name: Test macOS Arm64 py3.13 Auth SSL Async - run_on: - - macos-14-arm64 - expansions: - AUTH: auth - SSL: ssl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 -- name: test-macos-arm64-py3.13-noauth-ssl-async - tasks: - - name: .sharded_cluster .6.0 - - name: .sharded_cluster .7.0 - - name: 
.sharded_cluster .8.0 - - name: .sharded_cluster .rapid - - name: .sharded_cluster .latest - display_name: Test macOS Arm64 py3.13 NoAuth SSL Async - run_on: - - macos-14-arm64 - expansions: - AUTH: noauth - SSL: ssl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 -- name: test-macos-arm64-py3.13-noauth-nossl-async - tasks: - - name: .sharded_cluster .6.0 - - name: .sharded_cluster .7.0 - - name: .sharded_cluster .8.0 - - name: .sharded_cluster .rapid - - name: .sharded_cluster .latest - display_name: Test macOS Arm64 py3.13 NoAuth NoSSL Async - run_on: - - macos-14-arm64 - expansions: - AUTH: noauth - SSL: nossl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 -- name: test-win64-py3.9-auth-ssl-sync - tasks: - - name: .standalone - display_name: Test Win64 py3.9 Auth SSL Sync - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: auth - SSL: ssl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/Python39/python.exe -- name: test-win64-py3.9-noauth-ssl-sync - tasks: - - name: .standalone - display_name: Test Win64 py3.9 NoAuth SSL Sync - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: noauth - SSL: ssl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/Python39/python.exe -- name: test-win64-py3.9-noauth-nossl-sync - tasks: - - name: .standalone - display_name: Test Win64 py3.9 NoAuth NoSSL Sync - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: noauth - SSL: nossl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/Python39/python.exe -- name: test-win64-py3.9-auth-ssl-async - tasks: - - name: .standalone - display_name: Test Win64 py3.9 Auth SSL Async - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: auth - SSL: ssl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/Python39/python.exe -- name: test-win64-py3.9-noauth-ssl-async - tasks: - - name: .standalone - display_name: Test Win64 py3.9 NoAuth SSL Async - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: noauth - SSL: ssl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/Python39/python.exe -- name: test-win64-py3.9-noauth-nossl-async - tasks: - - name: .standalone - display_name: Test Win64 py3.9 NoAuth NoSSL Async - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: noauth - SSL: nossl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/Python39/python.exe -- name: test-win64-py3.13-auth-ssl-sync - tasks: - - name: .sharded_cluster - display_name: Test Win64 py3.13 Auth SSL Sync - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: auth - SSL: ssl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/Python313/python.exe -- name: test-win64-py3.13-noauth-ssl-sync - tasks: - - name: .sharded_cluster - display_name: Test Win64 py3.13 NoAuth SSL Sync - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: noauth - SSL: ssl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/Python313/python.exe -- name: test-win64-py3.13-noauth-nossl-sync - tasks: - - name: .sharded_cluster - display_name: Test Win64 py3.13 NoAuth NoSSL Sync - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: noauth - SSL: nossl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: 
C:/python/Python313/python.exe -- name: test-win64-py3.13-auth-ssl-async - tasks: - - name: .sharded_cluster - display_name: Test Win64 py3.13 Auth SSL Async - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: auth - SSL: ssl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/Python313/python.exe -- name: test-win64-py3.13-noauth-ssl-async - tasks: - - name: .sharded_cluster - display_name: Test Win64 py3.13 NoAuth SSL Async - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: noauth - SSL: ssl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/Python313/python.exe -- name: test-win64-py3.13-noauth-nossl-async - tasks: - - name: .sharded_cluster - display_name: Test Win64 py3.13 NoAuth NoSSL Async - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: noauth - SSL: nossl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/Python313/python.exe -- name: test-win32-py3.9-auth-ssl-sync - tasks: - - name: .standalone - display_name: Test Win32 py3.9 Auth SSL Sync - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: auth - SSL: ssl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/32/Python39/python.exe -- name: test-win32-py3.9-noauth-ssl-sync - tasks: - - name: .standalone - display_name: Test Win32 py3.9 NoAuth SSL Sync - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: noauth - SSL: ssl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/32/Python39/python.exe -- name: test-win32-py3.9-noauth-nossl-sync - tasks: - - name: .standalone - display_name: Test Win32 py3.9 NoAuth NoSSL Sync - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: noauth - SSL: nossl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/32/Python39/python.exe -- name: test-win32-py3.9-auth-ssl-async - tasks: - - name: .standalone - display_name: Test Win32 py3.9 Auth SSL Async - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: auth - SSL: ssl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/32/Python39/python.exe -- name: test-win32-py3.9-noauth-ssl-async - tasks: - - name: .standalone - display_name: Test Win32 py3.9 NoAuth SSL Async - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: noauth - SSL: ssl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/32/Python39/python.exe -- name: test-win32-py3.9-noauth-nossl-async - tasks: - - name: .standalone - display_name: Test Win32 py3.9 NoAuth NoSSL Async - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: noauth - SSL: nossl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/32/Python39/python.exe -- name: test-win32-py3.13-auth-ssl-sync - tasks: - - name: .sharded_cluster - display_name: Test Win32 py3.13 Auth SSL Sync - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: auth - SSL: ssl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/32/Python313/python.exe -- name: test-win32-py3.13-noauth-ssl-sync - tasks: - - name: .sharded_cluster - display_name: Test Win32 py3.13 NoAuth SSL Sync - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: noauth - SSL: ssl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/32/Python313/python.exe -- name: test-win32-py3.13-noauth-nossl-sync - tasks: - - name: .sharded_cluster - display_name: Test Win32 py3.13 NoAuth NoSSL Sync - run_on: - - 
windows-64-vsMulti-small - expansions: - AUTH: noauth - SSL: nossl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/32/Python313/python.exe -- name: test-win32-py3.13-auth-ssl-async - tasks: - - name: .sharded_cluster - display_name: Test Win32 py3.13 Auth SSL Async - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: auth - SSL: ssl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/32/Python313/python.exe -- name: test-win32-py3.13-noauth-ssl-async - tasks: - - name: .sharded_cluster - display_name: Test Win32 py3.13 NoAuth SSL Async - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: noauth - SSL: ssl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/32/Python313/python.exe -- name: test-win32-py3.13-noauth-nossl-async - tasks: - - name: .sharded_cluster - display_name: Test Win32 py3.13 NoAuth NoSSL Async - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: noauth - SSL: nossl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/32/Python313/python.exe - -# Encryption tests. -- name: encryption-rhel8-py3.9-auth-ssl - tasks: - - name: .standalone - - name: .replica_set - - name: .sharded_cluster - display_name: Encryption RHEL8 py3.9 Auth SSL - run_on: - - rhel87-small - batchtime: 10080 - expansions: - AUTH: auth - SSL: ssl - test_encryption: "true" - PYTHON_BINARY: /opt/python/3.9/bin/python3 - tags: [encryption_tag] -- name: encryption-rhel8-py3.13-auth-ssl - tasks: - - name: .standalone - - name: .replica_set - - name: .sharded_cluster - display_name: Encryption RHEL8 py3.13 Auth SSL - run_on: - - rhel87-small - batchtime: 10080 - expansions: - AUTH: auth - SSL: ssl - test_encryption: "true" - PYTHON_BINARY: /opt/python/3.13/bin/python3 - tags: [encryption_tag] -- name: encryption-rhel8-pypy3.10-auth-ssl - tasks: - - name: .standalone - - name: .replica_set - - name: .sharded_cluster - display_name: Encryption RHEL8 pypy3.10 Auth SSL - run_on: - - rhel87-small - batchtime: 10080 - expansions: - AUTH: auth - SSL: ssl - test_encryption: "true" - PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 - tags: [encryption_tag] -- name: encryption-crypt_shared-rhel8-py3.9-auth-ssl - tasks: - - name: .standalone - - name: .replica_set - - name: .sharded_cluster - display_name: Encryption crypt_shared RHEL8 py3.9 Auth SSL - run_on: - - rhel87-small - batchtime: 10080 - expansions: - AUTH: auth - SSL: ssl - test_encryption: "true" - test_crypt_shared: "true" - PYTHON_BINARY: /opt/python/3.9/bin/python3 - tags: [encryption_tag] -- name: encryption-crypt_shared-rhel8-py3.13-auth-ssl - tasks: - - name: .standalone - - name: .replica_set - - name: .sharded_cluster - display_name: Encryption crypt_shared RHEL8 py3.13 Auth SSL - run_on: - - rhel87-small - batchtime: 10080 - expansions: - AUTH: auth - SSL: ssl - test_encryption: "true" - test_crypt_shared: "true" - PYTHON_BINARY: /opt/python/3.13/bin/python3 - tags: [encryption_tag] -- name: encryption-crypt_shared-rhel8-pypy3.10-auth-ssl - tasks: - - name: .standalone - - name: .replica_set - - name: .sharded_cluster - display_name: Encryption crypt_shared RHEL8 pypy3.10 Auth SSL - run_on: - - rhel87-small - batchtime: 10080 - expansions: - AUTH: auth - SSL: ssl - test_encryption: "true" - test_crypt_shared: "true" - PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 - tags: [encryption_tag] -- name: encryption-pyopenssl-rhel8-py3.9-auth-ssl - tasks: - - name: .standalone - - name: .replica_set - - name: 
.sharded_cluster - display_name: Encryption PyOpenSSL RHEL8 py3.9 Auth SSL - run_on: - - rhel87-small - batchtime: 10080 - expansions: - AUTH: auth - SSL: ssl - test_encryption: "true" - test_encryption_pyopenssl: "true" - PYTHON_BINARY: /opt/python/3.9/bin/python3 - tags: [encryption_tag] -- name: encryption-pyopenssl-rhel8-py3.13-auth-ssl - tasks: - - name: .standalone - - name: .replica_set - - name: .sharded_cluster - display_name: Encryption PyOpenSSL RHEL8 py3.13 Auth SSL - run_on: - - rhel87-small - batchtime: 10080 - expansions: - AUTH: auth - SSL: ssl - test_encryption: "true" - test_encryption_pyopenssl: "true" - PYTHON_BINARY: /opt/python/3.13/bin/python3 - tags: [encryption_tag] -- name: encryption-pyopenssl-rhel8-pypy3.10-auth-ssl - tasks: - - name: .standalone - - name: .replica_set - - name: .sharded_cluster - display_name: Encryption PyOpenSSL RHEL8 pypy3.10 Auth SSL - run_on: - - rhel87-small - batchtime: 10080 - expansions: - AUTH: auth - SSL: ssl - test_encryption: "true" - test_encryption_pyopenssl: "true" - PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 - tags: [encryption_tag] -- name: encryption-rhel8-py3.10-auth-ssl - tasks: - - name: .replica_set - display_name: Encryption RHEL8 py3.10 Auth SSL - run_on: - - rhel87-small - expansions: - AUTH: auth - SSL: ssl - test_encryption: "true" - PYTHON_BINARY: /opt/python/3.10/bin/python3 -- name: encryption-crypt_shared-rhel8-py3.11-auth-nossl - tasks: - - name: .replica_set - display_name: Encryption crypt_shared RHEL8 py3.11 Auth NoSSL - run_on: - - rhel87-small - expansions: - AUTH: auth - SSL: nossl - test_encryption: "true" - test_crypt_shared: "true" - PYTHON_BINARY: /opt/python/3.11/bin/python3 -- name: encryption-pyopenssl-rhel8-py3.12-auth-ssl - tasks: - - name: .replica_set - display_name: Encryption PyOpenSSL RHEL8 py3.12 Auth SSL - run_on: - - rhel87-small - expansions: - AUTH: auth - SSL: ssl - test_encryption: "true" - TEST_ENCRYPTION_PYOPENSSL: "true" - PYTHON_BINARY: /opt/python/3.12/bin/python3 -- name: encryption-rhel8-pypy3.9-auth-nossl - tasks: - - name: .replica_set - display_name: Encryption RHEL8 pypy3.9 Auth NoSSL - run_on: - - rhel87-small - expansions: - AUTH: auth - SSL: nossl - test_encryption: "true" - PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 -- name: encryption-macos-py3.9-auth-ssl - tasks: - - name: .latest .replica_set - display_name: Encryption macOS py3.9 Auth SSL - run_on: - - macos-14 - batchtime: 10080 - expansions: - AUTH: auth - SSL: ssl - test_encryption: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 - tags: [encryption_tag] -- name: encryption-macos-py3.13-auth-nossl - tasks: - - name: .latest .replica_set - display_name: Encryption macOS py3.13 Auth NoSSL - run_on: - - macos-14 - batchtime: 10080 - expansions: - AUTH: auth - SSL: nossl - test_encryption: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 - tags: [encryption_tag] -- name: encryption-crypt_shared-macos-py3.9-auth-ssl - tasks: - - name: .latest .replica_set - display_name: Encryption crypt_shared macOS py3.9 Auth SSL - run_on: - - macos-14 - batchtime: 10080 - expansions: - AUTH: auth - SSL: ssl - test_encryption: "true" - test_crypt_shared: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 - tags: [encryption_tag] -- name: encryption-crypt_shared-macos-py3.13-auth-nossl - tasks: - - name: .latest .replica_set - display_name: Encryption crypt_shared macOS py3.13 Auth NoSSL - run_on: - - macos-14 - 
batchtime: 10080 - expansions: - AUTH: auth - SSL: nossl - test_encryption: "true" - test_crypt_shared: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 - tags: [encryption_tag] -- name: encryption-win64-py3.9-auth-ssl - tasks: - - name: .latest .replica_set - display_name: Encryption Win64 py3.9 Auth SSL - run_on: - - windows-64-vsMulti-small - batchtime: 10080 - expansions: - AUTH: auth - SSL: ssl - test_encryption: "true" - PYTHON_BINARY: C:/python/Python39/python.exe - tags: [encryption_tag] -- name: encryption-win64-py3.13-auth-nossl - tasks: - - name: .latest .replica_set - display_name: Encryption Win64 py3.13 Auth NoSSL - run_on: - - windows-64-vsMulti-small - batchtime: 10080 - expansions: - AUTH: auth - SSL: nossl - test_encryption: "true" - PYTHON_BINARY: C:/python/Python313/python.exe - tags: [encryption_tag] -- name: encryption-crypt_shared-win64-py3.9-auth-ssl - tasks: - - name: .latest .replica_set - display_name: Encryption crypt_shared Win64 py3.9 Auth SSL - run_on: - - windows-64-vsMulti-small - batchtime: 10080 - expansions: - AUTH: auth - SSL: ssl - test_encryption: "true" - test_crypt_shared: "true" - PYTHON_BINARY: C:/python/Python39/python.exe - tags: [encryption_tag] -- name: encryption-crypt_shared-win64-py3.13-auth-nossl - tasks: - - name: .latest .replica_set - display_name: Encryption crypt_shared Win64 py3.13 Auth NoSSL - run_on: - - windows-64-vsMulti-small - batchtime: 10080 - expansions: - AUTH: auth - SSL: nossl - test_encryption: "true" - test_crypt_shared: "true" - PYTHON_BINARY: C:/python/Python313/python.exe - tags: [encryption_tag] - -# Compressor tests. -- name: snappy-compression-rhel8-py3.9-no-c - tasks: - - name: .standalone - display_name: snappy compression RHEL8 py3.9 No C - run_on: - - rhel87-small - expansions: - COMPRESSORS: snappy - NO_EXT: "1" - PYTHON_BINARY: /opt/python/3.9/bin/python3 -- name: snappy-compression-rhel8-py3.10 - tasks: - - name: .standalone - display_name: snappy compression RHEL8 py3.10 - run_on: - - rhel87-small - expansions: - COMPRESSORS: snappy - PYTHON_BINARY: /opt/python/3.10/bin/python3 -- name: zlib-compression-rhel8-py3.11-no-c - tasks: - - name: .standalone - display_name: zlib compression RHEL8 py3.11 No C - run_on: - - rhel87-small - expansions: - COMPRESSORS: zlib - NO_EXT: "1" - PYTHON_BINARY: /opt/python/3.11/bin/python3 -- name: zlib-compression-rhel8-py3.12 - tasks: - - name: .standalone - display_name: zlib compression RHEL8 py3.12 - run_on: - - rhel87-small - expansions: - COMPRESSORS: zlib - PYTHON_BINARY: /opt/python/3.12/bin/python3 -- name: zstd-compression-rhel8-py3.13-no-c - tasks: - - name: .standalone !.4.0 - display_name: zstd compression RHEL8 py3.13 No C - run_on: - - rhel87-small - expansions: - COMPRESSORS: zstd - NO_EXT: "1" - PYTHON_BINARY: /opt/python/3.13/bin/python3 -- name: zstd-compression-rhel8-py3.9 - tasks: - - name: .standalone !.4.0 - display_name: zstd compression RHEL8 py3.9 - run_on: - - rhel87-small - expansions: - COMPRESSORS: zstd - PYTHON_BINARY: /opt/python/3.9/bin/python3 -- name: snappy-compression-rhel8-pypy3.9 - tasks: - - name: .standalone - display_name: snappy compression RHEL8 pypy3.9 - run_on: - - rhel87-small - expansions: - COMPRESSORS: snappy - PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 -- name: zlib-compression-rhel8-pypy3.10 - tasks: - - name: .standalone - display_name: zlib compression RHEL8 pypy3.10 - run_on: - - rhel87-small - expansions: - COMPRESSORS: zlib - PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 -- 
name: zstd-compression-rhel8-pypy3.9 - tasks: - - name: .standalone !.4.0 - display_name: zstd compression RHEL8 pypy3.9 - run_on: - - rhel87-small - expansions: - COMPRESSORS: zstd - PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 - -# Enterprise auth tests. -- name: enterprise-auth-macos-py3.9-auth - tasks: - - name: test-enterprise-auth - display_name: Enterprise Auth macOS py3.9 Auth - run_on: - - macos-14 - expansions: - AUTH: auth - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 -- name: enterprise-auth-rhel8-py3.10-auth - tasks: - - name: test-enterprise-auth - display_name: Enterprise Auth RHEL8 py3.10 Auth - run_on: - - rhel87-small - expansions: - AUTH: auth - PYTHON_BINARY: /opt/python/3.10/bin/python3 -- name: enterprise-auth-rhel8-py3.11-auth - tasks: - - name: test-enterprise-auth - display_name: Enterprise Auth RHEL8 py3.11 Auth - run_on: - - rhel87-small - expansions: - AUTH: auth - PYTHON_BINARY: /opt/python/3.11/bin/python3 -- name: enterprise-auth-rhel8-py3.12-auth - tasks: - - name: test-enterprise-auth - display_name: Enterprise Auth RHEL8 py3.12 Auth - run_on: - - rhel87-small - expansions: - AUTH: auth - PYTHON_BINARY: /opt/python/3.12/bin/python3 -- name: enterprise-auth-win64-py3.13-auth - tasks: - - name: test-enterprise-auth - display_name: Enterprise Auth Win64 py3.13 Auth - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: auth - PYTHON_BINARY: C:/python/Python313/python.exe -- name: enterprise-auth-rhel8-pypy3.9-auth - tasks: - - name: test-enterprise-auth - display_name: Enterprise Auth RHEL8 pypy3.9 Auth - run_on: - - rhel87-small - expansions: - AUTH: auth - PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 -- name: enterprise-auth-rhel8-pypy3.10-auth - tasks: - - name: test-enterprise-auth - display_name: Enterprise Auth RHEL8 pypy3.10 Auth - run_on: - - rhel87-small - expansions: - AUTH: auth - PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 - -# PyOpenSSL tests. 
-- name: pyopenssl-macos-py3.9 - tasks: - - name: .replica_set - - name: .7.0 - display_name: PyOpenSSL macOS py3.9 - run_on: - - macos-14 - batchtime: 10080 - expansions: - AUTH: noauth - test_pyopenssl: "true" - SSL: ssl - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 -- name: pyopenssl-rhel8-py3.10 - tasks: - - name: .replica_set - - name: .7.0 - display_name: PyOpenSSL RHEL8 py3.10 - run_on: - - rhel87-small - batchtime: 10080 - expansions: - AUTH: auth - test_pyopenssl: "true" - SSL: ssl - PYTHON_BINARY: /opt/python/3.10/bin/python3 -- name: pyopenssl-rhel8-py3.11 - tasks: - - name: .replica_set - - name: .7.0 - display_name: PyOpenSSL RHEL8 py3.11 - run_on: - - rhel87-small - batchtime: 10080 - expansions: - AUTH: auth - test_pyopenssl: "true" - SSL: ssl - PYTHON_BINARY: /opt/python/3.11/bin/python3 -- name: pyopenssl-rhel8-py3.12 - tasks: - - name: .replica_set - - name: .7.0 - display_name: PyOpenSSL RHEL8 py3.12 - run_on: - - rhel87-small - batchtime: 10080 - expansions: - AUTH: auth - test_pyopenssl: "true" - SSL: ssl - PYTHON_BINARY: /opt/python/3.12/bin/python3 -- name: pyopenssl-win64-py3.13 - tasks: - - name: .replica_set - - name: .7.0 - display_name: PyOpenSSL Win64 py3.13 - run_on: - - windows-64-vsMulti-small - batchtime: 10080 - expansions: - AUTH: auth - test_pyopenssl: "true" - SSL: ssl - PYTHON_BINARY: C:/python/Python313/python.exe -- name: pyopenssl-rhel8-pypy3.9 - tasks: - - name: .replica_set - - name: .7.0 - display_name: PyOpenSSL RHEL8 pypy3.9 - run_on: - - rhel87-small - batchtime: 10080 - expansions: - AUTH: auth - test_pyopenssl: "true" - SSL: ssl - PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 -- name: pyopenssl-rhel8-pypy3.10 - tasks: - - name: .replica_set - - name: .7.0 - display_name: PyOpenSSL RHEL8 pypy3.10 - run_on: - - rhel87-small - batchtime: 10080 - expansions: - AUTH: auth - test_pyopenssl: "true" - SSL: ssl - PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 - -# Storage Engine tests. -- name: storage-inmemory-rhel8-py3.9 - tasks: - - name: .standalone .4.0 - - name: .standalone .4.4 - - name: .standalone .5.0 - - name: .standalone .6.0 - - name: .standalone .7.0 - - name: .standalone .8.0 - - name: .standalone .rapid - - name: .standalone .latest - display_name: Storage InMemory RHEL8 py3.9 - run_on: - - rhel87-small - expansions: - STORAGE_ENGINE: inmemory - PYTHON_BINARY: /opt/python/3.9/bin/python3 -- name: storage-mmapv1-rhel8-py3.9 - tasks: - - name: .standalone .4.0 - - name: .replica_set .4.0 - display_name: Storage MMAPv1 RHEL8 py3.9 - run_on: - - rhel87-small - expansions: - STORAGE_ENGINE: mmapv1 - PYTHON_BINARY: /opt/python/3.9/bin/python3 - -# Versioned API tests. 
-- name: versioned-api-require-v1-rhel8-py3.9-auth - tasks: - - name: .standalone .5.0 - - name: .standalone .6.0 - - name: .standalone .7.0 - - name: .standalone .8.0 - - name: .standalone .rapid - - name: .standalone .latest - display_name: Versioned API require v1 RHEL8 py3.9 Auth - run_on: - - rhel87-small - expansions: - AUTH: auth - REQUIRE_API_VERSION: "1" - MONGODB_API_VERSION: "1" - PYTHON_BINARY: /opt/python/3.9/bin/python3 - tags: [versionedApi_tag] -- name: versioned-api-accept-v2-rhel8-py3.9-auth - tasks: - - name: .standalone .5.0 - - name: .standalone .6.0 - - name: .standalone .7.0 - - name: .standalone .8.0 - - name: .standalone .rapid - - name: .standalone .latest - display_name: Versioned API accept v2 RHEL8 py3.9 Auth - run_on: - - rhel87-small - expansions: - AUTH: auth - ORCHESTRATION_FILE: versioned-api-testing.json - PYTHON_BINARY: /opt/python/3.9/bin/python3 - tags: [versionedApi_tag] -- name: versioned-api-require-v1-rhel8-py3.13-auth - tasks: - - name: .standalone .5.0 - - name: .standalone .6.0 - - name: .standalone .7.0 - - name: .standalone .8.0 - - name: .standalone .rapid - - name: .standalone .latest - display_name: Versioned API require v1 RHEL8 py3.13 Auth - run_on: - - rhel87-small - expansions: - AUTH: auth - REQUIRE_API_VERSION: "1" - MONGODB_API_VERSION: "1" - PYTHON_BINARY: /opt/python/3.13/bin/python3 - tags: [versionedApi_tag] -- name: versioned-api-accept-v2-rhel8-py3.13-auth - tasks: - - name: .standalone .5.0 - - name: .standalone .6.0 - - name: .standalone .7.0 - - name: .standalone .8.0 - - name: .standalone .rapid - - name: .standalone .latest - display_name: Versioned API accept v2 RHEL8 py3.13 Auth - run_on: - - rhel87-small - expansions: - AUTH: auth - ORCHESTRATION_FILE: versioned-api-testing.json - PYTHON_BINARY: /opt/python/3.13/bin/python3 - tags: [versionedApi_tag] - -# Green framework tests. -- name: eventlet-rhel8-py3.9 - tasks: - - name: .standalone - display_name: Eventlet RHEL8 py3.9 - run_on: - - rhel87-small - expansions: - GREEN_FRAMEWORK: eventlet - AUTH: auth - SSL: ssl - PYTHON_BINARY: /opt/python/3.9/bin/python3 -- name: gevent-rhel8-py3.9 - tasks: - - name: .standalone - display_name: Gevent RHEL8 py3.9 - run_on: - - rhel87-small - expansions: - GREEN_FRAMEWORK: gevent - AUTH: auth - SSL: ssl - PYTHON_BINARY: /opt/python/3.9/bin/python3 -- name: eventlet-rhel8-py3.12 - tasks: - - name: .standalone - display_name: Eventlet RHEL8 py3.12 - run_on: - - rhel87-small - expansions: - GREEN_FRAMEWORK: eventlet - AUTH: auth - SSL: ssl - PYTHON_BINARY: /opt/python/3.12/bin/python3 -- name: gevent-rhel8-py3.12 - tasks: - - name: .standalone - display_name: Gevent RHEL8 py3.12 - run_on: - - rhel87-small - expansions: - GREEN_FRAMEWORK: gevent - AUTH: auth - SSL: ssl - PYTHON_BINARY: /opt/python/3.12/bin/python3 - -# No C Ext tests. 
-- name: no-c-ext-rhel8-py3.9 - tasks: - - name: .standalone - display_name: No C Ext RHEL8 py3.9 - run_on: - - rhel87-small - expansions: - NO_EXT: "1" - PYTHON_BINARY: /opt/python/3.9/bin/python3 -- name: no-c-ext-rhel8-py3.10 - tasks: - - name: .replica_set - display_name: No C Ext RHEL8 py3.10 - run_on: - - rhel87-small - expansions: - NO_EXT: "1" - PYTHON_BINARY: /opt/python/3.10/bin/python3 -- name: no-c-ext-rhel8-py3.11 - tasks: - - name: .sharded_cluster - display_name: No C Ext RHEL8 py3.11 - run_on: - - rhel87-small - expansions: - NO_EXT: "1" - PYTHON_BINARY: /opt/python/3.11/bin/python3 -- name: no-c-ext-rhel8-py3.12 - tasks: - - name: .standalone - display_name: No C Ext RHEL8 py3.12 - run_on: - - rhel87-small - expansions: - NO_EXT: "1" - PYTHON_BINARY: /opt/python/3.12/bin/python3 -- name: no-c-ext-rhel8-py3.13 - tasks: - - name: .replica_set - display_name: No C Ext RHEL8 py3.13 - run_on: - - rhel87-small - expansions: - NO_EXT: "1" - PYTHON_BINARY: /opt/python/3.13/bin/python3 - -# Atlas Data Lake tests. -- name: atlas-data-lake-rhel8-py3.9-no-c - tasks: - - name: atlas-data-lake-tests - display_name: Atlas Data Lake RHEL8 py3.9 No C - run_on: - - rhel87-small - expansions: - NO_EXT: "1" - PYTHON_BINARY: /opt/python/3.9/bin/python3 -- name: atlas-data-lake-rhel8-py3.9 - tasks: - - name: atlas-data-lake-tests - display_name: Atlas Data Lake RHEL8 py3.9 - run_on: - - rhel87-small - expansions: - PYTHON_BINARY: /opt/python/3.9/bin/python3 -- name: atlas-data-lake-rhel8-py3.13-no-c - tasks: - - name: atlas-data-lake-tests - display_name: Atlas Data Lake RHEL8 py3.13 No C - run_on: - - rhel87-small - expansions: - NO_EXT: "1" - PYTHON_BINARY: /opt/python/3.13/bin/python3 -- name: atlas-data-lake-rhel8-py3.13 - tasks: - - name: atlas-data-lake-tests - display_name: Atlas Data Lake RHEL8 py3.13 - run_on: - - rhel87-small - expansions: - PYTHON_BINARY: /opt/python/3.13/bin/python3 - -# Mod_wsgi tests. -- name: mod_wsgi-ubuntu-22-py3.9 - tasks: - - name: mod-wsgi-standalone - - name: mod-wsgi-replica-set - - name: mod-wsgi-embedded-mode-standalone - - name: mod-wsgi-embedded-mode-replica-set - display_name: mod_wsgi Ubuntu-22 py3.9 - run_on: - - ubuntu2204-small - expansions: - MOD_WSGI_VERSION: "4" - PYTHON_BINARY: /opt/python/3.9/bin/python3 -- name: mod_wsgi-ubuntu-22-py3.13 - tasks: - - name: mod-wsgi-standalone - - name: mod-wsgi-replica-set - - name: mod-wsgi-embedded-mode-standalone - - name: mod-wsgi-embedded-mode-replica-set - display_name: mod_wsgi Ubuntu-22 py3.13 - run_on: - - ubuntu2204-small - expansions: - MOD_WSGI_VERSION: "4" - PYTHON_BINARY: /opt/python/3.13/bin/python3 - -# Disable test commands variants. -- name: disable-test-commands-rhel8-py3.9 - tasks: - - name: .latest - display_name: Disable test commands RHEL8 py3.9 - run_on: - - rhel87-small - expansions: - AUTH: auth - SSL: ssl - DISABLE_TEST_COMMANDS: "1" - PYTHON_BINARY: /opt/python/3.9/bin/python3 - -# Serverless variants. -- name: serverless-rhel8-py3.9 - tasks: - - name: serverless_task_group - display_name: Serverless RHEL8 py3.9 - run_on: - - rhel87-small - batchtime: 10080 - expansions: - test_serverless: "true" - AUTH: auth - SSL: ssl - PYTHON_BINARY: /opt/python/3.9/bin/python3 -- name: serverless-rhel8-py3.13 - tasks: - - name: serverless_task_group - display_name: Serverless RHEL8 py3.13 - run_on: - - rhel87-small - batchtime: 10080 - expansions: - test_serverless: "true" - AUTH: auth - SSL: ssl - PYTHON_BINARY: /opt/python/3.13/bin/python3 - -# AWS Auth tests. 
-- name: aws-auth-ubuntu-20-py3.9 - tasks: - - name: aws-auth-test-4.4 - - name: aws-auth-test-5.0 - - name: aws-auth-test-6.0 - - name: aws-auth-test-7.0 - - name: aws-auth-test-8.0 - - name: aws-auth-test-rapid - - name: aws-auth-test-latest - display_name: AWS Auth Ubuntu-20 py3.9 - run_on: - - ubuntu2004-small - expansions: - PYTHON_BINARY: /opt/python/3.9/bin/python3 -- name: aws-auth-ubuntu-20-py3.13 - tasks: - - name: aws-auth-test-4.4 - - name: aws-auth-test-5.0 - - name: aws-auth-test-6.0 - - name: aws-auth-test-7.0 - - name: aws-auth-test-8.0 - - name: aws-auth-test-rapid - - name: aws-auth-test-latest - display_name: AWS Auth Ubuntu-20 py3.13 - run_on: - - ubuntu2004-small - expansions: - PYTHON_BINARY: /opt/python/3.13/bin/python3 -- name: aws-auth-win64-py3.9 - tasks: - - name: aws-auth-test-4.4 - - name: aws-auth-test-5.0 - - name: aws-auth-test-6.0 - - name: aws-auth-test-7.0 - - name: aws-auth-test-8.0 - - name: aws-auth-test-rapid - - name: aws-auth-test-latest - display_name: AWS Auth Win64 py3.9 - run_on: - - windows-64-vsMulti-small - expansions: - skip_ECS_auth_test: "true" - PYTHON_BINARY: C:/python/Python39/python.exe -- name: aws-auth-win64-py3.13 - tasks: - - name: aws-auth-test-4.4 - - name: aws-auth-test-5.0 - - name: aws-auth-test-6.0 - - name: aws-auth-test-7.0 - - name: aws-auth-test-8.0 - - name: aws-auth-test-rapid - - name: aws-auth-test-latest - display_name: AWS Auth Win64 py3.13 - run_on: - - windows-64-vsMulti-small - expansions: - skip_ECS_auth_test: "true" - PYTHON_BINARY: C:/python/Python313/python.exe -- name: aws-auth-macos-py3.9 - tasks: - - name: aws-auth-test-4.4 - - name: aws-auth-test-5.0 - - name: aws-auth-test-6.0 - - name: aws-auth-test-7.0 - - name: aws-auth-test-8.0 - - name: aws-auth-test-rapid - - name: aws-auth-test-latest - display_name: AWS Auth macOS py3.9 - run_on: - - macos-14 - expansions: - skip_ECS_auth_test: "true" - skip_EC2_auth_test: "true" - skip_web_identity_auth_test: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 -- name: aws-auth-macos-py3.13 - tasks: - - name: aws-auth-test-4.4 - - name: aws-auth-test-5.0 - - name: aws-auth-test-6.0 - - name: aws-auth-test-7.0 - - name: aws-auth-test-8.0 - - name: aws-auth-test-rapid - - name: aws-auth-test-latest - display_name: AWS Auth macOS py3.13 - run_on: - - macos-14 - expansions: - skip_ECS_auth_test: "true" - skip_EC2_auth_test: "true" - skip_web_identity_auth_test: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 - -# Other hosts tests. 
-- name: openssl-1.0.2-rhel7-py3.9-auth-ssl - tasks: - - name: .5.0 .standalone - display_name: OpenSSL 1.0.2 RHEL7 py3.9 Auth SSL - run_on: - - rhel79-small - batchtime: 10080 - expansions: - SKIP_HATCH: "true" - AUTH: auth - SSL: ssl - PYTHON_BINARY: /opt/python/3.9/bin/python3 -- name: openssl-1.0.2-rhel7-py3.9-noauth-ssl - tasks: - - name: .5.0 .standalone - display_name: OpenSSL 1.0.2 RHEL7 py3.9 NoAuth SSL - run_on: - - rhel79-small - batchtime: 10080 - expansions: - SKIP_HATCH: "true" - AUTH: noauth - SSL: ssl - PYTHON_BINARY: /opt/python/3.9/bin/python3 -- name: openssl-1.0.2-rhel7-py3.9-noauth-nossl - tasks: - - name: .5.0 .standalone - display_name: OpenSSL 1.0.2 RHEL7 py3.9 NoAuth NoSSL - run_on: - - rhel79-small - batchtime: 10080 - expansions: - SKIP_HATCH: "true" - AUTH: noauth - SSL: nossl - PYTHON_BINARY: /opt/python/3.9/bin/python3 -- name: other-hosts-rhel9-fips-auth-ssl - tasks: - - name: .6.0 .standalone - display_name: Other hosts RHEL9-FIPS Auth SSL - run_on: - - rhel92-fips - batchtime: 10080 - expansions: - SKIP_HATCH: "true" - AUTH: auth - SSL: ssl -- name: other-hosts-rhel9-fips-noauth-ssl - tasks: - - name: .6.0 .standalone - display_name: Other hosts RHEL9-FIPS NoAuth SSL - run_on: - - rhel92-fips - batchtime: 10080 - expansions: - SKIP_HATCH: "true" - AUTH: noauth - SSL: ssl -- name: other-hosts-rhel9-fips-noauth-nossl - tasks: - - name: .6.0 .standalone - display_name: Other hosts RHEL9-FIPS NoAuth NoSSL - run_on: - - rhel92-fips - batchtime: 10080 - expansions: - SKIP_HATCH: "true" - AUTH: noauth - SSL: nossl -- name: other-hosts-rhel8-zseries-auth-ssl - tasks: - - name: .6.0 .standalone - display_name: Other hosts RHEL8-zseries Auth SSL - run_on: - - rhel8-zseries-small - batchtime: 10080 - expansions: - SKIP_HATCH: "true" - AUTH: auth - SSL: ssl -- name: other-hosts-rhel8-zseries-noauth-ssl - tasks: - - name: .6.0 .standalone - display_name: Other hosts RHEL8-zseries NoAuth SSL - run_on: - - rhel8-zseries-small - batchtime: 10080 - expansions: - SKIP_HATCH: "true" - AUTH: noauth - SSL: ssl -- name: other-hosts-rhel8-zseries-noauth-nossl - tasks: - - name: .6.0 .standalone - display_name: Other hosts RHEL8-zseries NoAuth NoSSL - run_on: - - rhel8-zseries-small - batchtime: 10080 - expansions: - SKIP_HATCH: "true" - AUTH: noauth - SSL: nossl -- name: other-hosts-rhel8-power8-auth-ssl - tasks: - - name: .6.0 .standalone - display_name: Other hosts RHEL8-POWER8 Auth SSL - run_on: - - rhel8-power-small - batchtime: 10080 - expansions: - SKIP_HATCH: "true" - AUTH: auth - SSL: ssl -- name: other-hosts-rhel8-power8-noauth-ssl - tasks: - - name: .6.0 .standalone - display_name: Other hosts RHEL8-POWER8 NoAuth SSL - run_on: - - rhel8-power-small - batchtime: 10080 - expansions: - SKIP_HATCH: "true" - AUTH: noauth - SSL: ssl -- name: other-hosts-rhel8-power8-noauth-nossl - tasks: - - name: .6.0 .standalone - display_name: Other hosts RHEL8-POWER8 NoAuth NoSSL - run_on: - - rhel8-power-small - batchtime: 10080 - expansions: - SKIP_HATCH: "true" - AUTH: noauth - SSL: nossl -- name: other-hosts-rhel8-arm64-auth-ssl - tasks: - - name: .6.0 .standalone - display_name: Other hosts RHEL8-arm64 Auth SSL - run_on: - - rhel82-arm64-small - batchtime: 10080 - expansions: - SKIP_HATCH: "true" - AUTH: auth - SSL: ssl -- name: other-hosts-rhel8-arm64-noauth-ssl - tasks: - - name: .6.0 .standalone - display_name: Other hosts RHEL8-arm64 NoAuth SSL - run_on: - - rhel82-arm64-small - batchtime: 10080 - expansions: - SKIP_HATCH: "true" - AUTH: noauth - SSL: ssl -- name: 
other-hosts-rhel8-arm64-noauth-nossl - tasks: - - name: .6.0 .standalone - display_name: Other hosts RHEL8-arm64 NoAuth NoSSL - run_on: - - rhel82-arm64-small - batchtime: 10080 - expansions: - SKIP_HATCH: "true" - AUTH: noauth - SSL: nossl -- name: oidc-auth-rhel8 - tasks: - - name: testoidc_task_group - display_name: OIDC Auth RHEL8 - run_on: - - rhel87-small - batchtime: 20160 -- name: oidc-auth-macos - tasks: - - name: testoidc_task_group - display_name: OIDC Auth macOS - run_on: - - macos-14 - batchtime: 20160 -- name: oidc-auth-win64 - tasks: - - name: testoidc_task_group - display_name: OIDC Auth Win64 - run_on: - - windows-64-vsMulti-small - batchtime: 20160 -- name: atlas-connect-rhel8-py3.9 - tasks: - - name: atlas-connect - display_name: Atlas connect RHEL8 py3.9 - run_on: - - rhel87-small - expansions: - PYTHON_BINARY: /opt/python/3.9/bin/python3 -- name: atlas-connect-rhel8-py3.13 - tasks: - - name: atlas-connect - display_name: Atlas connect RHEL8 py3.13 - run_on: - - rhel87-small - expansions: - PYTHON_BINARY: /opt/python/3.13/bin/python3 -- name: doctests-rhel8-py3.9 - tasks: - - name: doctests - display_name: Doctests RHEL8 py3.9 - run_on: - - rhel87-small - expansions: - PYTHON_BINARY: /opt/python/3.9/bin/python3 -- name: mockupdb-tests-rhel8-py3.9 - tasks: - - name: mockupdb - display_name: MockupDB Tests RHEL8 py3.9 - run_on: - - rhel87-small - expansions: - PYTHON_BINARY: /opt/python/3.9/bin/python3 -- name: search-index-helpers-rhel8-py3.9 - tasks: - - name: test_atlas_task_group_search_indexes - display_name: Search Index Helpers RHEL8 py3.9 - run_on: - - rhel87-small - expansions: - PYTHON_BINARY: /opt/python/3.9/bin/python3 - - name: "no-server" display_name: "No server test" run_on: @@ -3936,348 +1991,6 @@ buildvariants: tasks: - name: "coverage-report" -# OCSP tests. 
-- name: ocsp-test-rhel8-v4.4-py3.9 - tasks: - - name: .ocsp - display_name: OCSP test RHEL8 v4.4 py3.9 - run_on: - - rhel87-small - batchtime: 20160 - expansions: - AUTH: noauth - SSL: ssl - TOPOLOGY: server - VERSION: "4.4" - PYTHON_BINARY: /opt/python/3.9/bin/python3 -- name: ocsp-test-rhel8-v5.0-py3.10 - tasks: - - name: .ocsp - display_name: OCSP test RHEL8 v5.0 py3.10 - run_on: - - rhel87-small - batchtime: 20160 - expansions: - AUTH: noauth - SSL: ssl - TOPOLOGY: server - VERSION: "5.0" - PYTHON_BINARY: /opt/python/3.10/bin/python3 -- name: ocsp-test-rhel8-v6.0-py3.11 - tasks: - - name: .ocsp - display_name: OCSP test RHEL8 v6.0 py3.11 - run_on: - - rhel87-small - batchtime: 20160 - expansions: - AUTH: noauth - SSL: ssl - TOPOLOGY: server - VERSION: "6.0" - PYTHON_BINARY: /opt/python/3.11/bin/python3 -- name: ocsp-test-rhel8-v7.0-py3.12 - tasks: - - name: .ocsp - display_name: OCSP test RHEL8 v7.0 py3.12 - run_on: - - rhel87-small - batchtime: 20160 - expansions: - AUTH: noauth - SSL: ssl - TOPOLOGY: server - VERSION: "7.0" - PYTHON_BINARY: /opt/python/3.12/bin/python3 -- name: ocsp-test-rhel8-v8.0-py3.13 - tasks: - - name: .ocsp - display_name: OCSP test RHEL8 v8.0 py3.13 - run_on: - - rhel87-small - batchtime: 20160 - expansions: - AUTH: noauth - SSL: ssl - TOPOLOGY: server - VERSION: "8.0" - PYTHON_BINARY: /opt/python/3.13/bin/python3 -- name: ocsp-test-rhel8-rapid-pypy3.9 - tasks: - - name: .ocsp - display_name: OCSP test RHEL8 rapid pypy3.9 - run_on: - - rhel87-small - batchtime: 20160 - expansions: - AUTH: noauth - SSL: ssl - TOPOLOGY: server - VERSION: rapid - PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 -- name: ocsp-test-rhel8-latest-pypy3.10 - tasks: - - name: .ocsp - display_name: OCSP test RHEL8 latest pypy3.10 - run_on: - - rhel87-small - batchtime: 20160 - expansions: - AUTH: noauth - SSL: ssl - TOPOLOGY: server - VERSION: latest - PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 -- name: ocsp-test-win64-v4.4-py3.9 - tasks: - - name: .ocsp-rsa !.ocsp-staple - display_name: OCSP test Win64 v4.4 py3.9 - run_on: - - windows-64-vsMulti-small - batchtime: 20160 - expansions: - AUTH: noauth - SSL: ssl - TOPOLOGY: server - VERSION: "4.4" - PYTHON_BINARY: C:/python/Python39/python.exe -- name: ocsp-test-win64-v8.0-py3.13 - tasks: - - name: .ocsp-rsa !.ocsp-staple - display_name: OCSP test Win64 v8.0 py3.13 - run_on: - - windows-64-vsMulti-small - batchtime: 20160 - expansions: - AUTH: noauth - SSL: ssl - TOPOLOGY: server - VERSION: "8.0" - PYTHON_BINARY: C:/python/Python313/python.exe -- name: ocsp-test-macos-v4.4-py3.9 - tasks: - - name: .ocsp-rsa !.ocsp-staple - display_name: OCSP test macOS v4.4 py3.9 - run_on: - - macos-14 - batchtime: 20160 - expansions: - AUTH: noauth - SSL: ssl - TOPOLOGY: server - VERSION: "4.4" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 -- name: ocsp-test-macos-v8.0-py3.13 - tasks: - - name: .ocsp-rsa !.ocsp-staple - display_name: OCSP test macOS v8.0 py3.13 - run_on: - - macos-14 - batchtime: 20160 - expansions: - AUTH: noauth - SSL: ssl - TOPOLOGY: server - VERSION: "8.0" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 - -# Load balancer tests -- name: load-balancer-rhel8-v6.0-py3.9-auth-ssl - tasks: - - name: load-balancer-test - display_name: Load Balancer RHEL8 v6.0 py3.9 Auth SSL - run_on: - - rhel87-small - batchtime: 10080 - expansions: - VERSION: "6.0" - AUTH: auth - SSL: ssl - test_loadbalancer: "true" - PYTHON_BINARY: /opt/python/3.9/bin/python3 -- name: 
load-balancer-rhel8-v6.0-py3.10-noauth-ssl - tasks: - - name: load-balancer-test - display_name: Load Balancer RHEL8 v6.0 py3.10 NoAuth SSL - run_on: - - rhel87-small - batchtime: 10080 - expansions: - VERSION: "6.0" - AUTH: noauth - SSL: ssl - test_loadbalancer: "true" - PYTHON_BINARY: /opt/python/3.10/bin/python3 -- name: load-balancer-rhel8-v6.0-py3.11-noauth-nossl - tasks: - - name: load-balancer-test - display_name: Load Balancer RHEL8 v6.0 py3.11 NoAuth NoSSL - run_on: - - rhel87-small - batchtime: 10080 - expansions: - VERSION: "6.0" - AUTH: noauth - SSL: nossl - test_loadbalancer: "true" - PYTHON_BINARY: /opt/python/3.11/bin/python3 -- name: load-balancer-rhel8-v7.0-py3.12-auth-ssl - tasks: - - name: load-balancer-test - display_name: Load Balancer RHEL8 v7.0 py3.12 Auth SSL - run_on: - - rhel87-small - batchtime: 10080 - expansions: - VERSION: "7.0" - AUTH: auth - SSL: ssl - test_loadbalancer: "true" - PYTHON_BINARY: /opt/python/3.12/bin/python3 -- name: load-balancer-rhel8-v7.0-py3.13-noauth-ssl - tasks: - - name: load-balancer-test - display_name: Load Balancer RHEL8 v7.0 py3.13 NoAuth SSL - run_on: - - rhel87-small - batchtime: 10080 - expansions: - VERSION: "7.0" - AUTH: noauth - SSL: ssl - test_loadbalancer: "true" - PYTHON_BINARY: /opt/python/3.13/bin/python3 -- name: load-balancer-rhel8-v7.0-pypy3.9-noauth-nossl - tasks: - - name: load-balancer-test - display_name: Load Balancer RHEL8 v7.0 pypy3.9 NoAuth NoSSL - run_on: - - rhel87-small - batchtime: 10080 - expansions: - VERSION: "7.0" - AUTH: noauth - SSL: nossl - test_loadbalancer: "true" - PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 -- name: load-balancer-rhel8-v8.0-pypy3.10-auth-ssl - tasks: - - name: load-balancer-test - display_name: Load Balancer RHEL8 v8.0 pypy3.10 Auth SSL - run_on: - - rhel87-small - batchtime: 10080 - expansions: - VERSION: "8.0" - AUTH: auth - SSL: ssl - test_loadbalancer: "true" - PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 -- name: load-balancer-rhel8-v8.0-py3.9-noauth-ssl - tasks: - - name: load-balancer-test - display_name: Load Balancer RHEL8 v8.0 py3.9 NoAuth SSL - run_on: - - rhel87-small - batchtime: 10080 - expansions: - VERSION: "8.0" - AUTH: noauth - SSL: ssl - test_loadbalancer: "true" - PYTHON_BINARY: /opt/python/3.9/bin/python3 -- name: load-balancer-rhel8-v8.0-py3.10-noauth-nossl - tasks: - - name: load-balancer-test - display_name: Load Balancer RHEL8 v8.0 py3.10 NoAuth NoSSL - run_on: - - rhel87-small - batchtime: 10080 - expansions: - VERSION: "8.0" - AUTH: noauth - SSL: nossl - test_loadbalancer: "true" - PYTHON_BINARY: /opt/python/3.10/bin/python3 -- name: load-balancer-rhel8-latest-py3.11-auth-ssl - tasks: - - name: load-balancer-test - display_name: Load Balancer RHEL8 latest py3.11 Auth SSL - run_on: - - rhel87-small - batchtime: 10080 - expansions: - VERSION: latest - AUTH: auth - SSL: ssl - test_loadbalancer: "true" - PYTHON_BINARY: /opt/python/3.11/bin/python3 -- name: load-balancer-rhel8-latest-py3.12-noauth-ssl - tasks: - - name: load-balancer-test - display_name: Load Balancer RHEL8 latest py3.12 NoAuth SSL - run_on: - - rhel87-small - batchtime: 10080 - expansions: - VERSION: latest - AUTH: noauth - SSL: ssl - test_loadbalancer: "true" - PYTHON_BINARY: /opt/python/3.12/bin/python3 -- name: load-balancer-rhel8-latest-py3.13-noauth-nossl - tasks: - - name: load-balancer-test - display_name: Load Balancer RHEL8 latest py3.13 NoAuth NoSSL - run_on: - - rhel87-small - batchtime: 10080 - expansions: - VERSION: latest - AUTH: noauth - SSL: nossl - test_loadbalancer: 
"true" - PYTHON_BINARY: /opt/python/3.13/bin/python3 -- name: load-balancer-rhel8-rapid-pypy3.9-auth-ssl - tasks: - - name: load-balancer-test - display_name: Load Balancer RHEL8 rapid pypy3.9 Auth SSL - run_on: - - rhel87-small - batchtime: 10080 - expansions: - VERSION: rapid - AUTH: auth - SSL: ssl - test_loadbalancer: "true" - PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 -- name: load-balancer-rhel8-rapid-pypy3.10-noauth-ssl - tasks: - - name: load-balancer-test - display_name: Load Balancer RHEL8 rapid pypy3.10 NoAuth SSL - run_on: - - rhel87-small - batchtime: 10080 - expansions: - VERSION: rapid - AUTH: noauth - SSL: ssl - test_loadbalancer: "true" - PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 -- name: load-balancer-rhel8-rapid-py3.9-noauth-nossl - tasks: - - name: load-balancer-test - display_name: Load Balancer RHEL8 rapid py3.9 NoAuth NoSSL - run_on: - - rhel87-small - batchtime: 10080 - expansions: - VERSION: rapid - AUTH: noauth - SSL: nossl - test_loadbalancer: "true" - PYTHON_BINARY: /opt/python/3.9/bin/python3 - - name: testazureoidc-variant display_name: "OIDC Auth Azure" run_on: ubuntu2204-small diff --git a/.evergreen/generated_configs/variants.yml b/.evergreen/generated_configs/variants.yml new file mode 100644 index 0000000000..52f8c673b3 --- /dev/null +++ b/.evergreen/generated_configs/variants.yml @@ -0,0 +1,2204 @@ +buildvariants: + # Alternative hosts tests + - name: openssl-1.0.2-rhel7-py3.9-auth-ssl + tasks: + - name: .5.0 .standalone + display_name: OpenSSL 1.0.2 RHEL7 py3.9 Auth SSL + run_on: + - rhel79-small + batchtime: 10080 + expansions: + SKIP_HATCH: "true" + AUTH: auth + SSL: ssl + PYTHON_BINARY: /opt/python/3.9/bin/python3 + - name: openssl-1.0.2-rhel7-py3.9-noauth-ssl + tasks: + - name: .5.0 .standalone + display_name: OpenSSL 1.0.2 RHEL7 py3.9 NoAuth SSL + run_on: + - rhel79-small + batchtime: 10080 + expansions: + SKIP_HATCH: "true" + AUTH: noauth + SSL: ssl + PYTHON_BINARY: /opt/python/3.9/bin/python3 + - name: openssl-1.0.2-rhel7-py3.9-noauth-nossl + tasks: + - name: .5.0 .standalone + display_name: OpenSSL 1.0.2 RHEL7 py3.9 NoAuth NoSSL + run_on: + - rhel79-small + batchtime: 10080 + expansions: + SKIP_HATCH: "true" + AUTH: noauth + SSL: nossl + PYTHON_BINARY: /opt/python/3.9/bin/python3 + - name: other-hosts-rhel9-fips-auth-ssl + tasks: + - name: .6.0 .standalone + display_name: Other hosts RHEL9-FIPS Auth SSL + run_on: + - rhel92-fips + batchtime: 10080 + expansions: + SKIP_HATCH: "true" + AUTH: auth + SSL: ssl + - name: other-hosts-rhel9-fips-noauth-ssl + tasks: + - name: .6.0 .standalone + display_name: Other hosts RHEL9-FIPS NoAuth SSL + run_on: + - rhel92-fips + batchtime: 10080 + expansions: + SKIP_HATCH: "true" + AUTH: noauth + SSL: ssl + - name: other-hosts-rhel9-fips-noauth-nossl + tasks: + - name: .6.0 .standalone + display_name: Other hosts RHEL9-FIPS NoAuth NoSSL + run_on: + - rhel92-fips + batchtime: 10080 + expansions: + SKIP_HATCH: "true" + AUTH: noauth + SSL: nossl + - name: other-hosts-rhel8-zseries-auth-ssl + tasks: + - name: .6.0 .standalone + display_name: Other hosts RHEL8-zseries Auth SSL + run_on: + - rhel8-zseries-small + batchtime: 10080 + expansions: + SKIP_HATCH: "true" + AUTH: auth + SSL: ssl + - name: other-hosts-rhel8-zseries-noauth-ssl + tasks: + - name: .6.0 .standalone + display_name: Other hosts RHEL8-zseries NoAuth SSL + run_on: + - rhel8-zseries-small + batchtime: 10080 + expansions: + SKIP_HATCH: "true" + AUTH: noauth + SSL: ssl + - name: other-hosts-rhel8-zseries-noauth-nossl + tasks: + - name: .6.0 
.standalone + display_name: Other hosts RHEL8-zseries NoAuth NoSSL + run_on: + - rhel8-zseries-small + batchtime: 10080 + expansions: + SKIP_HATCH: "true" + AUTH: noauth + SSL: nossl + - name: other-hosts-rhel8-power8-auth-ssl + tasks: + - name: .6.0 .standalone + display_name: Other hosts RHEL8-POWER8 Auth SSL + run_on: + - rhel8-power-small + batchtime: 10080 + expansions: + SKIP_HATCH: "true" + AUTH: auth + SSL: ssl + - name: other-hosts-rhel8-power8-noauth-ssl + tasks: + - name: .6.0 .standalone + display_name: Other hosts RHEL8-POWER8 NoAuth SSL + run_on: + - rhel8-power-small + batchtime: 10080 + expansions: + SKIP_HATCH: "true" + AUTH: noauth + SSL: ssl + - name: other-hosts-rhel8-power8-noauth-nossl + tasks: + - name: .6.0 .standalone + display_name: Other hosts RHEL8-POWER8 NoAuth NoSSL + run_on: + - rhel8-power-small + batchtime: 10080 + expansions: + SKIP_HATCH: "true" + AUTH: noauth + SSL: nossl + - name: other-hosts-rhel8-arm64-auth-ssl + tasks: + - name: .6.0 .standalone + display_name: Other hosts RHEL8-arm64 Auth SSL + run_on: + - rhel82-arm64-small + batchtime: 10080 + expansions: + SKIP_HATCH: "true" + AUTH: auth + SSL: ssl + - name: other-hosts-rhel8-arm64-noauth-ssl + tasks: + - name: .6.0 .standalone + display_name: Other hosts RHEL8-arm64 NoAuth SSL + run_on: + - rhel82-arm64-small + batchtime: 10080 + expansions: + SKIP_HATCH: "true" + AUTH: noauth + SSL: ssl + - name: other-hosts-rhel8-arm64-noauth-nossl + tasks: + - name: .6.0 .standalone + display_name: Other hosts RHEL8-arm64 NoAuth NoSSL + run_on: + - rhel82-arm64-small + batchtime: 10080 + expansions: + SKIP_HATCH: "true" + AUTH: noauth + SSL: nossl + + # Atlas connect tests + - name: atlas-connect-rhel8-py3.9 + tasks: + - name: atlas-connect + display_name: Atlas connect RHEL8 py3.9 + run_on: + - rhel87-small + expansions: + PYTHON_BINARY: /opt/python/3.9/bin/python3 + - name: atlas-connect-rhel8-py3.13 + tasks: + - name: atlas-connect + display_name: Atlas connect RHEL8 py3.13 + run_on: + - rhel87-small + expansions: + PYTHON_BINARY: /opt/python/3.13/bin/python3 + + # Atlas data lake tests + - name: atlas-data-lake-rhel8-py3.9-no-c + tasks: + - name: atlas-data-lake-tests + display_name: Atlas Data Lake RHEL8 py3.9 No C + run_on: + - rhel87-small + expansions: + NO_EXT: "1" + PYTHON_BINARY: /opt/python/3.9/bin/python3 + - name: atlas-data-lake-rhel8-py3.9 + tasks: + - name: atlas-data-lake-tests + display_name: Atlas Data Lake RHEL8 py3.9 + run_on: + - rhel87-small + expansions: + PYTHON_BINARY: /opt/python/3.9/bin/python3 + - name: atlas-data-lake-rhel8-py3.13-no-c + tasks: + - name: atlas-data-lake-tests + display_name: Atlas Data Lake RHEL8 py3.13 No C + run_on: + - rhel87-small + expansions: + NO_EXT: "1" + PYTHON_BINARY: /opt/python/3.13/bin/python3 + - name: atlas-data-lake-rhel8-py3.13 + tasks: + - name: atlas-data-lake-tests + display_name: Atlas Data Lake RHEL8 py3.13 + run_on: + - rhel87-small + expansions: + PYTHON_BINARY: /opt/python/3.13/bin/python3 + + # Aws auth tests + - name: aws-auth-ubuntu-20-py3.9 + tasks: + - name: aws-auth-test-4.4 + - name: aws-auth-test-5.0 + - name: aws-auth-test-6.0 + - name: aws-auth-test-7.0 + - name: aws-auth-test-8.0 + - name: aws-auth-test-rapid + - name: aws-auth-test-latest + display_name: AWS Auth Ubuntu-20 py3.9 + run_on: + - ubuntu2004-small + expansions: + PYTHON_BINARY: /opt/python/3.9/bin/python3 + - name: aws-auth-ubuntu-20-py3.13 + tasks: + - name: aws-auth-test-4.4 + - name: aws-auth-test-5.0 + - name: aws-auth-test-6.0 + - name: aws-auth-test-7.0 + - 
name: aws-auth-test-8.0 + - name: aws-auth-test-rapid + - name: aws-auth-test-latest + display_name: AWS Auth Ubuntu-20 py3.13 + run_on: + - ubuntu2004-small + expansions: + PYTHON_BINARY: /opt/python/3.13/bin/python3 + - name: aws-auth-win64-py3.9 + tasks: + - name: aws-auth-test-4.4 + - name: aws-auth-test-5.0 + - name: aws-auth-test-6.0 + - name: aws-auth-test-7.0 + - name: aws-auth-test-8.0 + - name: aws-auth-test-rapid + - name: aws-auth-test-latest + display_name: AWS Auth Win64 py3.9 + run_on: + - windows-64-vsMulti-small + expansions: + skip_ECS_auth_test: "true" + PYTHON_BINARY: C:/python/Python39/python.exe + - name: aws-auth-win64-py3.13 + tasks: + - name: aws-auth-test-4.4 + - name: aws-auth-test-5.0 + - name: aws-auth-test-6.0 + - name: aws-auth-test-7.0 + - name: aws-auth-test-8.0 + - name: aws-auth-test-rapid + - name: aws-auth-test-latest + display_name: AWS Auth Win64 py3.13 + run_on: + - windows-64-vsMulti-small + expansions: + skip_ECS_auth_test: "true" + PYTHON_BINARY: C:/python/Python313/python.exe + - name: aws-auth-macos-py3.9 + tasks: + - name: aws-auth-test-4.4 + - name: aws-auth-test-5.0 + - name: aws-auth-test-6.0 + - name: aws-auth-test-7.0 + - name: aws-auth-test-8.0 + - name: aws-auth-test-rapid + - name: aws-auth-test-latest + display_name: AWS Auth macOS py3.9 + run_on: + - macos-14 + expansions: + skip_ECS_auth_test: "true" + skip_EC2_auth_test: "true" + skip_web_identity_auth_test: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 + - name: aws-auth-macos-py3.13 + tasks: + - name: aws-auth-test-4.4 + - name: aws-auth-test-5.0 + - name: aws-auth-test-6.0 + - name: aws-auth-test-7.0 + - name: aws-auth-test-8.0 + - name: aws-auth-test-rapid + - name: aws-auth-test-latest + display_name: AWS Auth macOS py3.13 + run_on: + - macos-14 + expansions: + skip_ECS_auth_test: "true" + skip_EC2_auth_test: "true" + skip_web_identity_auth_test: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 + + # Compression tests + - name: snappy-compression-rhel8-py3.9-no-c + tasks: + - name: .standalone + display_name: snappy compression RHEL8 py3.9 No C + run_on: + - rhel87-small + expansions: + COMPRESSORS: snappy + NO_EXT: "1" + PYTHON_BINARY: /opt/python/3.9/bin/python3 + - name: snappy-compression-rhel8-py3.10 + tasks: + - name: .standalone + display_name: snappy compression RHEL8 py3.10 + run_on: + - rhel87-small + expansions: + COMPRESSORS: snappy + PYTHON_BINARY: /opt/python/3.10/bin/python3 + - name: zlib-compression-rhel8-py3.11-no-c + tasks: + - name: .standalone + display_name: zlib compression RHEL8 py3.11 No C + run_on: + - rhel87-small + expansions: + COMPRESSORS: zlib + NO_EXT: "1" + PYTHON_BINARY: /opt/python/3.11/bin/python3 + - name: zlib-compression-rhel8-py3.12 + tasks: + - name: .standalone + display_name: zlib compression RHEL8 py3.12 + run_on: + - rhel87-small + expansions: + COMPRESSORS: zlib + PYTHON_BINARY: /opt/python/3.12/bin/python3 + - name: zstd-compression-rhel8-py3.13-no-c + tasks: + - name: .standalone !.4.0 + display_name: zstd compression RHEL8 py3.13 No C + run_on: + - rhel87-small + expansions: + COMPRESSORS: zstd + NO_EXT: "1" + PYTHON_BINARY: /opt/python/3.13/bin/python3 + - name: zstd-compression-rhel8-py3.9 + tasks: + - name: .standalone !.4.0 + display_name: zstd compression RHEL8 py3.9 + run_on: + - rhel87-small + expansions: + COMPRESSORS: zstd + PYTHON_BINARY: /opt/python/3.9/bin/python3 + - name: snappy-compression-rhel8-pypy3.9 + tasks: + - name: .standalone + 
display_name: snappy compression RHEL8 pypy3.9 + run_on: + - rhel87-small + expansions: + COMPRESSORS: snappy + PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 + - name: zlib-compression-rhel8-pypy3.10 + tasks: + - name: .standalone + display_name: zlib compression RHEL8 pypy3.10 + run_on: + - rhel87-small + expansions: + COMPRESSORS: zlib + PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 + - name: zstd-compression-rhel8-pypy3.9 + tasks: + - name: .standalone !.4.0 + display_name: zstd compression RHEL8 pypy3.9 + run_on: + - rhel87-small + expansions: + COMPRESSORS: zstd + PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 + + # Disable test commands tests + - name: disable-test-commands-rhel8-py3.9 + tasks: + - name: .latest + display_name: Disable test commands RHEL8 py3.9 + run_on: + - rhel87-small + expansions: + AUTH: auth + SSL: ssl + DISABLE_TEST_COMMANDS: "1" + PYTHON_BINARY: /opt/python/3.9/bin/python3 + + # Doctests tests + - name: doctests-rhel8-py3.9 + tasks: + - name: doctests + display_name: Doctests RHEL8 py3.9 + run_on: + - rhel87-small + expansions: + PYTHON_BINARY: /opt/python/3.9/bin/python3 + + # Encryption tests + - name: encryption-rhel8-py3.9-auth-ssl + tasks: + - name: .standalone + - name: .replica_set + - name: .sharded_cluster + display_name: Encryption RHEL8 py3.9 Auth SSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + AUTH: auth + SSL: ssl + test_encryption: "true" + PYTHON_BINARY: /opt/python/3.9/bin/python3 + tags: [encryption_tag] + - name: encryption-rhel8-py3.13-auth-ssl + tasks: + - name: .standalone + - name: .replica_set + - name: .sharded_cluster + display_name: Encryption RHEL8 py3.13 Auth SSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + AUTH: auth + SSL: ssl + test_encryption: "true" + PYTHON_BINARY: /opt/python/3.13/bin/python3 + tags: [encryption_tag] + - name: encryption-rhel8-pypy3.10-auth-ssl + tasks: + - name: .standalone + - name: .replica_set + - name: .sharded_cluster + display_name: Encryption RHEL8 pypy3.10 Auth SSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + AUTH: auth + SSL: ssl + test_encryption: "true" + PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 + tags: [encryption_tag] + - name: encryption-crypt_shared-rhel8-py3.9-auth-ssl + tasks: + - name: .standalone + - name: .replica_set + - name: .sharded_cluster + display_name: Encryption crypt_shared RHEL8 py3.9 Auth SSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + AUTH: auth + SSL: ssl + test_encryption: "true" + test_crypt_shared: "true" + PYTHON_BINARY: /opt/python/3.9/bin/python3 + tags: [encryption_tag] + - name: encryption-crypt_shared-rhel8-py3.13-auth-ssl + tasks: + - name: .standalone + - name: .replica_set + - name: .sharded_cluster + display_name: Encryption crypt_shared RHEL8 py3.13 Auth SSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + AUTH: auth + SSL: ssl + test_encryption: "true" + test_crypt_shared: "true" + PYTHON_BINARY: /opt/python/3.13/bin/python3 + tags: [encryption_tag] + - name: encryption-crypt_shared-rhel8-pypy3.10-auth-ssl + tasks: + - name: .standalone + - name: .replica_set + - name: .sharded_cluster + display_name: Encryption crypt_shared RHEL8 pypy3.10 Auth SSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + AUTH: auth + SSL: ssl + test_encryption: "true" + test_crypt_shared: "true" + PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 + tags: [encryption_tag] + - name: encryption-pyopenssl-rhel8-py3.9-auth-ssl + tasks: + - name: .standalone + - name: .replica_set + - 
name: .sharded_cluster + display_name: Encryption PyOpenSSL RHEL8 py3.9 Auth SSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + AUTH: auth + SSL: ssl + test_encryption: "true" + test_encryption_pyopenssl: "true" + PYTHON_BINARY: /opt/python/3.9/bin/python3 + tags: [encryption_tag] + - name: encryption-pyopenssl-rhel8-py3.13-auth-ssl + tasks: + - name: .standalone + - name: .replica_set + - name: .sharded_cluster + display_name: Encryption PyOpenSSL RHEL8 py3.13 Auth SSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + AUTH: auth + SSL: ssl + test_encryption: "true" + test_encryption_pyopenssl: "true" + PYTHON_BINARY: /opt/python/3.13/bin/python3 + tags: [encryption_tag] + - name: encryption-pyopenssl-rhel8-pypy3.10-auth-ssl + tasks: + - name: .standalone + - name: .replica_set + - name: .sharded_cluster + display_name: Encryption PyOpenSSL RHEL8 pypy3.10 Auth SSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + AUTH: auth + SSL: ssl + test_encryption: "true" + test_encryption_pyopenssl: "true" + PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 + tags: [encryption_tag] + - name: encryption-rhel8-py3.10-auth-ssl + tasks: + - name: .replica_set + display_name: Encryption RHEL8 py3.10 Auth SSL + run_on: + - rhel87-small + expansions: + AUTH: auth + SSL: ssl + test_encryption: "true" + PYTHON_BINARY: /opt/python/3.10/bin/python3 + - name: encryption-crypt_shared-rhel8-py3.11-auth-nossl + tasks: + - name: .replica_set + display_name: Encryption crypt_shared RHEL8 py3.11 Auth NoSSL + run_on: + - rhel87-small + expansions: + AUTH: auth + SSL: nossl + test_encryption: "true" + test_crypt_shared: "true" + PYTHON_BINARY: /opt/python/3.11/bin/python3 + - name: encryption-pyopenssl-rhel8-py3.12-auth-ssl + tasks: + - name: .replica_set + display_name: Encryption PyOpenSSL RHEL8 py3.12 Auth SSL + run_on: + - rhel87-small + expansions: + AUTH: auth + SSL: ssl + test_encryption: "true" + test_encryption_pyopenssl: "true" + PYTHON_BINARY: /opt/python/3.12/bin/python3 + - name: encryption-rhel8-pypy3.9-auth-nossl + tasks: + - name: .replica_set + display_name: Encryption RHEL8 pypy3.9 Auth NoSSL + run_on: + - rhel87-small + expansions: + AUTH: auth + SSL: nossl + test_encryption: "true" + PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 + - name: encryption-macos-py3.9-auth-ssl + tasks: + - name: .latest .replica_set + display_name: Encryption macOS py3.9 Auth SSL + run_on: + - macos-14 + batchtime: 10080 + expansions: + AUTH: auth + SSL: ssl + test_encryption: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 + tags: [encryption_tag] + - name: encryption-macos-py3.13-auth-nossl + tasks: + - name: .latest .replica_set + display_name: Encryption macOS py3.13 Auth NoSSL + run_on: + - macos-14 + batchtime: 10080 + expansions: + AUTH: auth + SSL: nossl + test_encryption: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 + tags: [encryption_tag] + - name: encryption-crypt_shared-macos-py3.9-auth-ssl + tasks: + - name: .latest .replica_set + display_name: Encryption crypt_shared macOS py3.9 Auth SSL + run_on: + - macos-14 + batchtime: 10080 + expansions: + AUTH: auth + SSL: ssl + test_encryption: "true" + test_crypt_shared: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 + tags: [encryption_tag] + - name: encryption-crypt_shared-macos-py3.13-auth-nossl + tasks: + - name: .latest .replica_set + display_name: Encryption crypt_shared macOS py3.13 Auth NoSSL + run_on: + - 
macos-14 + batchtime: 10080 + expansions: + AUTH: auth + SSL: nossl + test_encryption: "true" + test_crypt_shared: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 + tags: [encryption_tag] + - name: encryption-win64-py3.9-auth-ssl + tasks: + - name: .latest .replica_set + display_name: Encryption Win64 py3.9 Auth SSL + run_on: + - windows-64-vsMulti-small + batchtime: 10080 + expansions: + AUTH: auth + SSL: ssl + test_encryption: "true" + PYTHON_BINARY: C:/python/Python39/python.exe + tags: [encryption_tag] + - name: encryption-win64-py3.13-auth-nossl + tasks: + - name: .latest .replica_set + display_name: Encryption Win64 py3.13 Auth NoSSL + run_on: + - windows-64-vsMulti-small + batchtime: 10080 + expansions: + AUTH: auth + SSL: nossl + test_encryption: "true" + PYTHON_BINARY: C:/python/Python313/python.exe + tags: [encryption_tag] + - name: encryption-crypt_shared-win64-py3.9-auth-ssl + tasks: + - name: .latest .replica_set + display_name: Encryption crypt_shared Win64 py3.9 Auth SSL + run_on: + - windows-64-vsMulti-small + batchtime: 10080 + expansions: + AUTH: auth + SSL: ssl + test_encryption: "true" + test_crypt_shared: "true" + PYTHON_BINARY: C:/python/Python39/python.exe + tags: [encryption_tag] + - name: encryption-crypt_shared-win64-py3.13-auth-nossl + tasks: + - name: .latest .replica_set + display_name: Encryption crypt_shared Win64 py3.13 Auth NoSSL + run_on: + - windows-64-vsMulti-small + batchtime: 10080 + expansions: + AUTH: auth + SSL: nossl + test_encryption: "true" + test_crypt_shared: "true" + PYTHON_BINARY: C:/python/Python313/python.exe + tags: [encryption_tag] + + # Enterprise auth tests + - name: enterprise-auth-macos-py3.9-auth + tasks: + - name: test-enterprise-auth + display_name: Enterprise Auth macOS py3.9 Auth + run_on: + - macos-14 + expansions: + AUTH: auth + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 + - name: enterprise-auth-rhel8-py3.10-auth + tasks: + - name: test-enterprise-auth + display_name: Enterprise Auth RHEL8 py3.10 Auth + run_on: + - rhel87-small + expansions: + AUTH: auth + PYTHON_BINARY: /opt/python/3.10/bin/python3 + - name: enterprise-auth-rhel8-py3.11-auth + tasks: + - name: test-enterprise-auth + display_name: Enterprise Auth RHEL8 py3.11 Auth + run_on: + - rhel87-small + expansions: + AUTH: auth + PYTHON_BINARY: /opt/python/3.11/bin/python3 + - name: enterprise-auth-rhel8-py3.12-auth + tasks: + - name: test-enterprise-auth + display_name: Enterprise Auth RHEL8 py3.12 Auth + run_on: + - rhel87-small + expansions: + AUTH: auth + PYTHON_BINARY: /opt/python/3.12/bin/python3 + - name: enterprise-auth-win64-py3.13-auth + tasks: + - name: test-enterprise-auth + display_name: Enterprise Auth Win64 py3.13 Auth + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: auth + PYTHON_BINARY: C:/python/Python313/python.exe + - name: enterprise-auth-rhel8-pypy3.9-auth + tasks: + - name: test-enterprise-auth + display_name: Enterprise Auth RHEL8 pypy3.9 Auth + run_on: + - rhel87-small + expansions: + AUTH: auth + PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 + - name: enterprise-auth-rhel8-pypy3.10-auth + tasks: + - name: test-enterprise-auth + display_name: Enterprise Auth RHEL8 pypy3.10 Auth + run_on: + - rhel87-small + expansions: + AUTH: auth + PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 + + # Green framework tests + - name: eventlet-rhel8-py3.9 + tasks: + - name: .standalone + display_name: Eventlet RHEL8 py3.9 + run_on: + - rhel87-small + expansions: + 
GREEN_FRAMEWORK: eventlet + AUTH: auth + SSL: ssl + PYTHON_BINARY: /opt/python/3.9/bin/python3 + - name: gevent-rhel8-py3.9 + tasks: + - name: .standalone + display_name: Gevent RHEL8 py3.9 + run_on: + - rhel87-small + expansions: + GREEN_FRAMEWORK: gevent + AUTH: auth + SSL: ssl + PYTHON_BINARY: /opt/python/3.9/bin/python3 + - name: eventlet-rhel8-py3.12 + tasks: + - name: .standalone + display_name: Eventlet RHEL8 py3.12 + run_on: + - rhel87-small + expansions: + GREEN_FRAMEWORK: eventlet + AUTH: auth + SSL: ssl + PYTHON_BINARY: /opt/python/3.12/bin/python3 + - name: gevent-rhel8-py3.12 + tasks: + - name: .standalone + display_name: Gevent RHEL8 py3.12 + run_on: + - rhel87-small + expansions: + GREEN_FRAMEWORK: gevent + AUTH: auth + SSL: ssl + PYTHON_BINARY: /opt/python/3.12/bin/python3 + + # Load balancer tests + - name: load-balancer-rhel8-v6.0-py3.9-auth-ssl + tasks: + - name: load-balancer-test + display_name: Load Balancer RHEL8 v6.0 py3.9 Auth SSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + VERSION: "6.0" + AUTH: auth + SSL: ssl + test_loadbalancer: "true" + PYTHON_BINARY: /opt/python/3.9/bin/python3 + - name: load-balancer-rhel8-v6.0-py3.10-noauth-ssl + tasks: + - name: load-balancer-test + display_name: Load Balancer RHEL8 v6.0 py3.10 NoAuth SSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + VERSION: "6.0" + AUTH: noauth + SSL: ssl + test_loadbalancer: "true" + PYTHON_BINARY: /opt/python/3.10/bin/python3 + - name: load-balancer-rhel8-v6.0-py3.11-noauth-nossl + tasks: + - name: load-balancer-test + display_name: Load Balancer RHEL8 v6.0 py3.11 NoAuth NoSSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + VERSION: "6.0" + AUTH: noauth + SSL: nossl + test_loadbalancer: "true" + PYTHON_BINARY: /opt/python/3.11/bin/python3 + - name: load-balancer-rhel8-v7.0-py3.12-auth-ssl + tasks: + - name: load-balancer-test + display_name: Load Balancer RHEL8 v7.0 py3.12 Auth SSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + VERSION: "7.0" + AUTH: auth + SSL: ssl + test_loadbalancer: "true" + PYTHON_BINARY: /opt/python/3.12/bin/python3 + - name: load-balancer-rhel8-v7.0-py3.13-noauth-ssl + tasks: + - name: load-balancer-test + display_name: Load Balancer RHEL8 v7.0 py3.13 NoAuth SSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + VERSION: "7.0" + AUTH: noauth + SSL: ssl + test_loadbalancer: "true" + PYTHON_BINARY: /opt/python/3.13/bin/python3 + - name: load-balancer-rhel8-v7.0-pypy3.9-noauth-nossl + tasks: + - name: load-balancer-test + display_name: Load Balancer RHEL8 v7.0 pypy3.9 NoAuth NoSSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + VERSION: "7.0" + AUTH: noauth + SSL: nossl + test_loadbalancer: "true" + PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 + - name: load-balancer-rhel8-v8.0-pypy3.10-auth-ssl + tasks: + - name: load-balancer-test + display_name: Load Balancer RHEL8 v8.0 pypy3.10 Auth SSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + VERSION: "8.0" + AUTH: auth + SSL: ssl + test_loadbalancer: "true" + PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 + - name: load-balancer-rhel8-v8.0-py3.9-noauth-ssl + tasks: + - name: load-balancer-test + display_name: Load Balancer RHEL8 v8.0 py3.9 NoAuth SSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + VERSION: "8.0" + AUTH: noauth + SSL: ssl + test_loadbalancer: "true" + PYTHON_BINARY: /opt/python/3.9/bin/python3 + - name: load-balancer-rhel8-v8.0-py3.10-noauth-nossl + tasks: + - name: load-balancer-test + display_name: 
Load Balancer RHEL8 v8.0 py3.10 NoAuth NoSSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + VERSION: "8.0" + AUTH: noauth + SSL: nossl + test_loadbalancer: "true" + PYTHON_BINARY: /opt/python/3.10/bin/python3 + - name: load-balancer-rhel8-rapid-py3.11-auth-ssl + tasks: + - name: load-balancer-test + display_name: Load Balancer RHEL8 rapid py3.11 Auth SSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + VERSION: rapid + AUTH: auth + SSL: ssl + test_loadbalancer: "true" + PYTHON_BINARY: /opt/python/3.11/bin/python3 + - name: load-balancer-rhel8-rapid-py3.12-noauth-ssl + tasks: + - name: load-balancer-test + display_name: Load Balancer RHEL8 rapid py3.12 NoAuth SSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + VERSION: rapid + AUTH: noauth + SSL: ssl + test_loadbalancer: "true" + PYTHON_BINARY: /opt/python/3.12/bin/python3 + - name: load-balancer-rhel8-rapid-py3.13-noauth-nossl + tasks: + - name: load-balancer-test + display_name: Load Balancer RHEL8 rapid py3.13 NoAuth NoSSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + VERSION: rapid + AUTH: noauth + SSL: nossl + test_loadbalancer: "true" + PYTHON_BINARY: /opt/python/3.13/bin/python3 + - name: load-balancer-rhel8-latest-pypy3.9-auth-ssl + tasks: + - name: load-balancer-test + display_name: Load Balancer RHEL8 latest pypy3.9 Auth SSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + VERSION: latest + AUTH: auth + SSL: ssl + test_loadbalancer: "true" + PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 + - name: load-balancer-rhel8-latest-pypy3.10-noauth-ssl + tasks: + - name: load-balancer-test + display_name: Load Balancer RHEL8 latest pypy3.10 NoAuth SSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + VERSION: latest + AUTH: noauth + SSL: ssl + test_loadbalancer: "true" + PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 + - name: load-balancer-rhel8-latest-py3.9-noauth-nossl + tasks: + - name: load-balancer-test + display_name: Load Balancer RHEL8 latest py3.9 NoAuth NoSSL + run_on: + - rhel87-small + batchtime: 10080 + expansions: + VERSION: latest + AUTH: noauth + SSL: nossl + test_loadbalancer: "true" + PYTHON_BINARY: /opt/python/3.9/bin/python3 + + # Mockupdb tests + - name: mockupdb-tests-rhel8-py3.9 + tasks: + - name: mockupdb + display_name: MockupDB Tests RHEL8 py3.9 + run_on: + - rhel87-small + expansions: + PYTHON_BINARY: /opt/python/3.9/bin/python3 + + # Mod wsgi tests + - name: mod_wsgi-ubuntu-22-py3.9 + tasks: + - name: mod-wsgi-standalone + - name: mod-wsgi-replica-set + - name: mod-wsgi-embedded-mode-standalone + - name: mod-wsgi-embedded-mode-replica-set + display_name: mod_wsgi Ubuntu-22 py3.9 + run_on: + - ubuntu2204-small + expansions: + MOD_WSGI_VERSION: "4" + PYTHON_BINARY: /opt/python/3.9/bin/python3 + - name: mod_wsgi-ubuntu-22-py3.13 + tasks: + - name: mod-wsgi-standalone + - name: mod-wsgi-replica-set + - name: mod-wsgi-embedded-mode-standalone + - name: mod-wsgi-embedded-mode-replica-set + display_name: mod_wsgi Ubuntu-22 py3.13 + run_on: + - ubuntu2204-small + expansions: + MOD_WSGI_VERSION: "4" + PYTHON_BINARY: /opt/python/3.13/bin/python3 + + # No c ext tests + - name: no-c-ext-rhel8-py3.9 + tasks: + - name: .standalone + display_name: No C Ext RHEL8 py3.9 + run_on: + - rhel87-small + expansions: + NO_EXT: "1" + PYTHON_BINARY: /opt/python/3.9/bin/python3 + - name: no-c-ext-rhel8-py3.10 + tasks: + - name: .replica_set + display_name: No C Ext RHEL8 py3.10 + run_on: + - rhel87-small + expansions: + NO_EXT: "1" + PYTHON_BINARY: 
/opt/python/3.10/bin/python3 + - name: no-c-ext-rhel8-py3.11 + tasks: + - name: .sharded_cluster + display_name: No C Ext RHEL8 py3.11 + run_on: + - rhel87-small + expansions: + NO_EXT: "1" + PYTHON_BINARY: /opt/python/3.11/bin/python3 + - name: no-c-ext-rhel8-py3.12 + tasks: + - name: .standalone + display_name: No C Ext RHEL8 py3.12 + run_on: + - rhel87-small + expansions: + NO_EXT: "1" + PYTHON_BINARY: /opt/python/3.12/bin/python3 + - name: no-c-ext-rhel8-py3.13 + tasks: + - name: .replica_set + display_name: No C Ext RHEL8 py3.13 + run_on: + - rhel87-small + expansions: + NO_EXT: "1" + PYTHON_BINARY: /opt/python/3.13/bin/python3 + + # Ocsp tests + - name: ocsp-test-rhel8-py3.9 + tasks: + - name: .ocsp + display_name: OCSP test RHEL8 py3.9 + run_on: + - rhel87-small + batchtime: 20160 + expansions: + AUTH: noauth + SSL: ssl + TOPOLOGY: server + PYTHON_BINARY: /opt/python/3.9/bin/python3 + VERSION: "4.4" + - name: ocsp-test-rhel8-py3.10 + tasks: + - name: .ocsp + display_name: OCSP test RHEL8 py3.10 + run_on: + - rhel87-small + batchtime: 20160 + expansions: + AUTH: noauth + SSL: ssl + TOPOLOGY: server + PYTHON_BINARY: /opt/python/3.10/bin/python3 + VERSION: "5.0" + - name: ocsp-test-rhel8-py3.11 + tasks: + - name: .ocsp + display_name: OCSP test RHEL8 py3.11 + run_on: + - rhel87-small + batchtime: 20160 + expansions: + AUTH: noauth + SSL: ssl + TOPOLOGY: server + PYTHON_BINARY: /opt/python/3.11/bin/python3 + VERSION: "6.0" + - name: ocsp-test-rhel8-py3.12 + tasks: + - name: .ocsp + display_name: OCSP test RHEL8 py3.12 + run_on: + - rhel87-small + batchtime: 20160 + expansions: + AUTH: noauth + SSL: ssl + TOPOLOGY: server + PYTHON_BINARY: /opt/python/3.12/bin/python3 + VERSION: "7.0" + - name: ocsp-test-rhel8-py3.13 + tasks: + - name: .ocsp + display_name: OCSP test RHEL8 py3.13 + run_on: + - rhel87-small + batchtime: 20160 + expansions: + AUTH: noauth + SSL: ssl + TOPOLOGY: server + PYTHON_BINARY: /opt/python/3.13/bin/python3 + VERSION: "8.0" + - name: ocsp-test-rhel8-pypy3.9 + tasks: + - name: .ocsp + display_name: OCSP test RHEL8 pypy3.9 + run_on: + - rhel87-small + batchtime: 20160 + expansions: + AUTH: noauth + SSL: ssl + TOPOLOGY: server + PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 + VERSION: rapid + - name: ocsp-test-rhel8-pypy3.10 + tasks: + - name: .ocsp + display_name: OCSP test RHEL8 pypy3.10 + run_on: + - rhel87-small + batchtime: 20160 + expansions: + AUTH: noauth + SSL: ssl + TOPOLOGY: server + PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 + VERSION: latest + - name: ocsp-test-win64-py3.9 + tasks: + - name: .ocsp-rsa !.ocsp-staple + display_name: OCSP test Win64 py3.9 + run_on: + - windows-64-vsMulti-small + batchtime: 20160 + expansions: + AUTH: noauth + SSL: ssl + TOPOLOGY: server + PYTHON_BINARY: C:/python/Python39/python.exe + VERSION: "4.4" + - name: ocsp-test-win64-py3.13 + tasks: + - name: .ocsp-rsa !.ocsp-staple + display_name: OCSP test Win64 py3.13 + run_on: + - windows-64-vsMulti-small + batchtime: 20160 + expansions: + AUTH: noauth + SSL: ssl + TOPOLOGY: server + PYTHON_BINARY: C:/python/Python313/python.exe + VERSION: "8.0" + - name: ocsp-test-macos-py3.9 + tasks: + - name: .ocsp-rsa !.ocsp-staple + display_name: OCSP test macOS py3.9 + run_on: + - macos-14 + batchtime: 20160 + expansions: + AUTH: noauth + SSL: ssl + TOPOLOGY: server + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 + VERSION: "4.4" + - name: ocsp-test-macos-py3.13 + tasks: + - name: .ocsp-rsa !.ocsp-staple + display_name: OCSP test macOS py3.13 + run_on: + - 
macos-14 + batchtime: 20160 + expansions: + AUTH: noauth + SSL: ssl + TOPOLOGY: server + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 + VERSION: "8.0" + + # Oidc auth tests + - name: oidc-auth-rhel8 + tasks: + - name: testoidc_task_group + display_name: OIDC Auth RHEL8 + run_on: + - rhel87-small + batchtime: 20160 + - name: oidc-auth-macos + tasks: + - name: testoidc_task_group + display_name: OIDC Auth macOS + run_on: + - macos-14 + batchtime: 20160 + - name: oidc-auth-win64 + tasks: + - name: testoidc_task_group + display_name: OIDC Auth Win64 + run_on: + - windows-64-vsMulti-small + batchtime: 20160 + + # Pyopenssl tests + - name: pyopenssl-macos-py3.9 + tasks: + - name: .replica_set + - name: .7.0 + display_name: PyOpenSSL macOS py3.9 + run_on: + - macos-14 + batchtime: 10080 + expansions: + AUTH: noauth + test_pyopenssl: "true" + SSL: ssl + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 + - name: pyopenssl-rhel8-py3.10 + tasks: + - name: .replica_set + - name: .7.0 + display_name: PyOpenSSL RHEL8 py3.10 + run_on: + - rhel87-small + batchtime: 10080 + expansions: + AUTH: auth + test_pyopenssl: "true" + SSL: ssl + PYTHON_BINARY: /opt/python/3.10/bin/python3 + - name: pyopenssl-rhel8-py3.11 + tasks: + - name: .replica_set + - name: .7.0 + display_name: PyOpenSSL RHEL8 py3.11 + run_on: + - rhel87-small + batchtime: 10080 + expansions: + AUTH: auth + test_pyopenssl: "true" + SSL: ssl + PYTHON_BINARY: /opt/python/3.11/bin/python3 + - name: pyopenssl-rhel8-py3.12 + tasks: + - name: .replica_set + - name: .7.0 + display_name: PyOpenSSL RHEL8 py3.12 + run_on: + - rhel87-small + batchtime: 10080 + expansions: + AUTH: auth + test_pyopenssl: "true" + SSL: ssl + PYTHON_BINARY: /opt/python/3.12/bin/python3 + - name: pyopenssl-win64-py3.13 + tasks: + - name: .replica_set + - name: .7.0 + display_name: PyOpenSSL Win64 py3.13 + run_on: + - windows-64-vsMulti-small + batchtime: 10080 + expansions: + AUTH: auth + test_pyopenssl: "true" + SSL: ssl + PYTHON_BINARY: C:/python/Python313/python.exe + - name: pyopenssl-rhel8-pypy3.9 + tasks: + - name: .replica_set + - name: .7.0 + display_name: PyOpenSSL RHEL8 pypy3.9 + run_on: + - rhel87-small + batchtime: 10080 + expansions: + AUTH: auth + test_pyopenssl: "true" + SSL: ssl + PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 + - name: pyopenssl-rhel8-pypy3.10 + tasks: + - name: .replica_set + - name: .7.0 + display_name: PyOpenSSL RHEL8 pypy3.10 + run_on: + - rhel87-small + batchtime: 10080 + expansions: + AUTH: auth + test_pyopenssl: "true" + SSL: ssl + PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 + + # Search index tests + - name: search-index-helpers-rhel8-py3.9 + tasks: + - name: test_atlas_task_group_search_indexes + display_name: Search Index Helpers RHEL8 py3.9 + run_on: + - rhel87-small + expansions: + PYTHON_BINARY: /opt/python/3.9/bin/python3 + + # Server tests + - name: test-rhel8-py3.9-auth-ssl-cov + tasks: + - name: .standalone + - name: .replica_set + - name: .sharded_cluster + display_name: Test RHEL8 py3.9 Auth SSL cov + run_on: + - rhel87-small + expansions: + AUTH: auth + SSL: ssl + COVERAGE: coverage + PYTHON_BINARY: /opt/python/3.9/bin/python3 + tags: [coverage_tag] + - name: test-rhel8-py3.9-noauth-ssl-cov + tasks: + - name: .standalone + - name: .replica_set + - name: .sharded_cluster + display_name: Test RHEL8 py3.9 NoAuth SSL cov + run_on: + - rhel87-small + expansions: + AUTH: noauth + SSL: ssl + COVERAGE: coverage + PYTHON_BINARY: /opt/python/3.9/bin/python3 + tags: 
[coverage_tag] + - name: test-rhel8-py3.9-noauth-nossl-cov + tasks: + - name: .standalone + - name: .replica_set + - name: .sharded_cluster + display_name: Test RHEL8 py3.9 NoAuth NoSSL cov + run_on: + - rhel87-small + expansions: + AUTH: noauth + SSL: nossl + COVERAGE: coverage + PYTHON_BINARY: /opt/python/3.9/bin/python3 + tags: [coverage_tag] + - name: test-rhel8-py3.13-auth-ssl-cov + tasks: + - name: .standalone + - name: .replica_set + - name: .sharded_cluster + display_name: Test RHEL8 py3.13 Auth SSL cov + run_on: + - rhel87-small + expansions: + AUTH: auth + SSL: ssl + COVERAGE: coverage + PYTHON_BINARY: /opt/python/3.13/bin/python3 + tags: [coverage_tag] + - name: test-rhel8-py3.13-noauth-ssl-cov + tasks: + - name: .standalone + - name: .replica_set + - name: .sharded_cluster + display_name: Test RHEL8 py3.13 NoAuth SSL cov + run_on: + - rhel87-small + expansions: + AUTH: noauth + SSL: ssl + COVERAGE: coverage + PYTHON_BINARY: /opt/python/3.13/bin/python3 + tags: [coverage_tag] + - name: test-rhel8-py3.13-noauth-nossl-cov + tasks: + - name: .standalone + - name: .replica_set + - name: .sharded_cluster + display_name: Test RHEL8 py3.13 NoAuth NoSSL cov + run_on: + - rhel87-small + expansions: + AUTH: noauth + SSL: nossl + COVERAGE: coverage + PYTHON_BINARY: /opt/python/3.13/bin/python3 + tags: [coverage_tag] + - name: test-rhel8-pypy3.10-auth-ssl-cov + tasks: + - name: .standalone + - name: .replica_set + - name: .sharded_cluster + display_name: Test RHEL8 pypy3.10 Auth SSL cov + run_on: + - rhel87-small + expansions: + AUTH: auth + SSL: ssl + COVERAGE: coverage + PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 + tags: [coverage_tag] + - name: test-rhel8-pypy3.10-noauth-ssl-cov + tasks: + - name: .standalone + - name: .replica_set + - name: .sharded_cluster + display_name: Test RHEL8 pypy3.10 NoAuth SSL cov + run_on: + - rhel87-small + expansions: + AUTH: noauth + SSL: ssl + COVERAGE: coverage + PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 + tags: [coverage_tag] + - name: test-rhel8-pypy3.10-noauth-nossl-cov + tasks: + - name: .standalone + - name: .replica_set + - name: .sharded_cluster + display_name: Test RHEL8 pypy3.10 NoAuth NoSSL cov + run_on: + - rhel87-small + expansions: + AUTH: noauth + SSL: nossl + COVERAGE: coverage + PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 + tags: [coverage_tag] + - name: test-rhel8-py3.10-auth-ssl + tasks: + - name: .standalone + display_name: Test RHEL8 py3.10 Auth SSL + run_on: + - rhel87-small + expansions: + AUTH: auth + SSL: ssl + PYTHON_BINARY: /opt/python/3.10/bin/python3 + - name: test-rhel8-py3.11-noauth-ssl + tasks: + - name: .replica_set + display_name: Test RHEL8 py3.11 NoAuth SSL + run_on: + - rhel87-small + expansions: + AUTH: noauth + SSL: ssl + PYTHON_BINARY: /opt/python/3.11/bin/python3 + - name: test-rhel8-py3.12-noauth-nossl + tasks: + - name: .sharded_cluster + display_name: Test RHEL8 py3.12 NoAuth NoSSL + run_on: + - rhel87-small + expansions: + AUTH: noauth + SSL: nossl + PYTHON_BINARY: /opt/python/3.12/bin/python3 + - name: test-rhel8-pypy3.9-auth-ssl + tasks: + - name: .standalone + display_name: Test RHEL8 pypy3.9 Auth SSL + run_on: + - rhel87-small + expansions: + AUTH: auth + SSL: ssl + PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 + - name: test-macos-py3.9-auth-ssl-sync + tasks: + - name: .standalone + display_name: Test macOS py3.9 Auth SSL Sync + run_on: + - macos-14 + expansions: + AUTH: auth + SSL: ssl + TEST_SUITES: default + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: 
/Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 + - name: test-macos-py3.9-noauth-ssl-sync + tasks: + - name: .standalone + display_name: Test macOS py3.9 NoAuth SSL Sync + run_on: + - macos-14 + expansions: + AUTH: noauth + SSL: ssl + TEST_SUITES: default + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 + - name: test-macos-py3.9-noauth-nossl-sync + tasks: + - name: .standalone + display_name: Test macOS py3.9 NoAuth NoSSL Sync + run_on: + - macos-14 + expansions: + AUTH: noauth + SSL: nossl + TEST_SUITES: default + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 + - name: test-macos-py3.9-auth-ssl-async + tasks: + - name: .standalone + display_name: Test macOS py3.9 Auth SSL Async + run_on: + - macos-14 + expansions: + AUTH: auth + SSL: ssl + TEST_SUITES: default_async + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 + - name: test-macos-py3.9-noauth-ssl-async + tasks: + - name: .standalone + display_name: Test macOS py3.9 NoAuth SSL Async + run_on: + - macos-14 + expansions: + AUTH: noauth + SSL: ssl + TEST_SUITES: default_async + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 + - name: test-macos-py3.9-noauth-nossl-async + tasks: + - name: .standalone + display_name: Test macOS py3.9 NoAuth NoSSL Async + run_on: + - macos-14 + expansions: + AUTH: noauth + SSL: nossl + TEST_SUITES: default_async + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 + - name: test-macos-py3.13-auth-ssl-sync + tasks: + - name: .sharded_cluster + display_name: Test macOS py3.13 Auth SSL Sync + run_on: + - macos-14 + expansions: + AUTH: auth + SSL: ssl + TEST_SUITES: default + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 + - name: test-macos-py3.13-noauth-ssl-sync + tasks: + - name: .sharded_cluster + display_name: Test macOS py3.13 NoAuth SSL Sync + run_on: + - macos-14 + expansions: + AUTH: noauth + SSL: ssl + TEST_SUITES: default + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 + - name: test-macos-py3.13-noauth-nossl-sync + tasks: + - name: .sharded_cluster + display_name: Test macOS py3.13 NoAuth NoSSL Sync + run_on: + - macos-14 + expansions: + AUTH: noauth + SSL: nossl + TEST_SUITES: default + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 + - name: test-macos-py3.13-auth-ssl-async + tasks: + - name: .sharded_cluster + display_name: Test macOS py3.13 Auth SSL Async + run_on: + - macos-14 + expansions: + AUTH: auth + SSL: ssl + TEST_SUITES: default_async + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 + - name: test-macos-py3.13-noauth-ssl-async + tasks: + - name: .sharded_cluster + display_name: Test macOS py3.13 NoAuth SSL Async + run_on: + - macos-14 + expansions: + AUTH: noauth + SSL: ssl + TEST_SUITES: default_async + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 + - name: test-macos-py3.13-noauth-nossl-async + tasks: + - name: .sharded_cluster + display_name: Test macOS py3.13 NoAuth NoSSL Async + run_on: + - macos-14 + expansions: + AUTH: noauth + SSL: nossl + TEST_SUITES: default_async + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: 
/Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 + - name: test-macos-arm64-py3.9-auth-ssl-sync + tasks: + - name: .standalone .6.0 + - name: .standalone .7.0 + - name: .standalone .8.0 + - name: .standalone .rapid + - name: .standalone .latest + display_name: Test macOS Arm64 py3.9 Auth SSL Sync + run_on: + - macos-14-arm64 + expansions: + AUTH: auth + SSL: ssl + TEST_SUITES: default + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 + - name: test-macos-arm64-py3.9-noauth-ssl-sync + tasks: + - name: .standalone .6.0 + - name: .standalone .7.0 + - name: .standalone .8.0 + - name: .standalone .rapid + - name: .standalone .latest + display_name: Test macOS Arm64 py3.9 NoAuth SSL Sync + run_on: + - macos-14-arm64 + expansions: + AUTH: noauth + SSL: ssl + TEST_SUITES: default + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 + - name: test-macos-arm64-py3.9-noauth-nossl-sync + tasks: + - name: .standalone .6.0 + - name: .standalone .7.0 + - name: .standalone .8.0 + - name: .standalone .rapid + - name: .standalone .latest + display_name: Test macOS Arm64 py3.9 NoAuth NoSSL Sync + run_on: + - macos-14-arm64 + expansions: + AUTH: noauth + SSL: nossl + TEST_SUITES: default + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 + - name: test-macos-arm64-py3.9-auth-ssl-async + tasks: + - name: .standalone .6.0 + - name: .standalone .7.0 + - name: .standalone .8.0 + - name: .standalone .rapid + - name: .standalone .latest + display_name: Test macOS Arm64 py3.9 Auth SSL Async + run_on: + - macos-14-arm64 + expansions: + AUTH: auth + SSL: ssl + TEST_SUITES: default_async + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 + - name: test-macos-arm64-py3.9-noauth-ssl-async + tasks: + - name: .standalone .6.0 + - name: .standalone .7.0 + - name: .standalone .8.0 + - name: .standalone .rapid + - name: .standalone .latest + display_name: Test macOS Arm64 py3.9 NoAuth SSL Async + run_on: + - macos-14-arm64 + expansions: + AUTH: noauth + SSL: ssl + TEST_SUITES: default_async + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 + - name: test-macos-arm64-py3.9-noauth-nossl-async + tasks: + - name: .standalone .6.0 + - name: .standalone .7.0 + - name: .standalone .8.0 + - name: .standalone .rapid + - name: .standalone .latest + display_name: Test macOS Arm64 py3.9 NoAuth NoSSL Async + run_on: + - macos-14-arm64 + expansions: + AUTH: noauth + SSL: nossl + TEST_SUITES: default_async + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 + - name: test-macos-arm64-py3.13-auth-ssl-sync + tasks: + - name: .sharded_cluster .6.0 + - name: .sharded_cluster .7.0 + - name: .sharded_cluster .8.0 + - name: .sharded_cluster .rapid + - name: .sharded_cluster .latest + display_name: Test macOS Arm64 py3.13 Auth SSL Sync + run_on: + - macos-14-arm64 + expansions: + AUTH: auth + SSL: ssl + TEST_SUITES: default + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 + - name: test-macos-arm64-py3.13-noauth-ssl-sync + tasks: + - name: .sharded_cluster .6.0 + - name: .sharded_cluster .7.0 + - name: .sharded_cluster .8.0 + - name: .sharded_cluster .rapid + - name: .sharded_cluster .latest + display_name: Test macOS Arm64 py3.13 NoAuth SSL Sync + run_on: + - 
macos-14-arm64 + expansions: + AUTH: noauth + SSL: ssl + TEST_SUITES: default + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 + - name: test-macos-arm64-py3.13-noauth-nossl-sync + tasks: + - name: .sharded_cluster .6.0 + - name: .sharded_cluster .7.0 + - name: .sharded_cluster .8.0 + - name: .sharded_cluster .rapid + - name: .sharded_cluster .latest + display_name: Test macOS Arm64 py3.13 NoAuth NoSSL Sync + run_on: + - macos-14-arm64 + expansions: + AUTH: noauth + SSL: nossl + TEST_SUITES: default + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 + - name: test-macos-arm64-py3.13-auth-ssl-async + tasks: + - name: .sharded_cluster .6.0 + - name: .sharded_cluster .7.0 + - name: .sharded_cluster .8.0 + - name: .sharded_cluster .rapid + - name: .sharded_cluster .latest + display_name: Test macOS Arm64 py3.13 Auth SSL Async + run_on: + - macos-14-arm64 + expansions: + AUTH: auth + SSL: ssl + TEST_SUITES: default_async + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 + - name: test-macos-arm64-py3.13-noauth-ssl-async + tasks: + - name: .sharded_cluster .6.0 + - name: .sharded_cluster .7.0 + - name: .sharded_cluster .8.0 + - name: .sharded_cluster .rapid + - name: .sharded_cluster .latest + display_name: Test macOS Arm64 py3.13 NoAuth SSL Async + run_on: + - macos-14-arm64 + expansions: + AUTH: noauth + SSL: ssl + TEST_SUITES: default_async + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 + - name: test-macos-arm64-py3.13-noauth-nossl-async + tasks: + - name: .sharded_cluster .6.0 + - name: .sharded_cluster .7.0 + - name: .sharded_cluster .8.0 + - name: .sharded_cluster .rapid + - name: .sharded_cluster .latest + display_name: Test macOS Arm64 py3.13 NoAuth NoSSL Async + run_on: + - macos-14-arm64 + expansions: + AUTH: noauth + SSL: nossl + TEST_SUITES: default_async + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 + - name: test-win64-py3.9-auth-ssl-sync + tasks: + - name: .standalone + display_name: Test Win64 py3.9 Auth SSL Sync + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: auth + SSL: ssl + TEST_SUITES: default + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: C:/python/Python39/python.exe + - name: test-win64-py3.9-noauth-ssl-sync + tasks: + - name: .standalone + display_name: Test Win64 py3.9 NoAuth SSL Sync + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: noauth + SSL: ssl + TEST_SUITES: default + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: C:/python/Python39/python.exe + - name: test-win64-py3.9-noauth-nossl-sync + tasks: + - name: .standalone + display_name: Test Win64 py3.9 NoAuth NoSSL Sync + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: noauth + SSL: nossl + TEST_SUITES: default + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: C:/python/Python39/python.exe + - name: test-win64-py3.9-auth-ssl-async + tasks: + - name: .standalone + display_name: Test Win64 py3.9 Auth SSL Async + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: auth + SSL: ssl + TEST_SUITES: default_async + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: C:/python/Python39/python.exe + - name: test-win64-py3.9-noauth-ssl-async + tasks: + - name: .standalone + display_name: Test Win64 py3.9 NoAuth SSL Async + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: noauth + SSL: ssl + TEST_SUITES: default_async + 
SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: C:/python/Python39/python.exe + - name: test-win64-py3.9-noauth-nossl-async + tasks: + - name: .standalone + display_name: Test Win64 py3.9 NoAuth NoSSL Async + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: noauth + SSL: nossl + TEST_SUITES: default_async + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: C:/python/Python39/python.exe + - name: test-win64-py3.13-auth-ssl-sync + tasks: + - name: .sharded_cluster + display_name: Test Win64 py3.13 Auth SSL Sync + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: auth + SSL: ssl + TEST_SUITES: default + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: C:/python/Python313/python.exe + - name: test-win64-py3.13-noauth-ssl-sync + tasks: + - name: .sharded_cluster + display_name: Test Win64 py3.13 NoAuth SSL Sync + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: noauth + SSL: ssl + TEST_SUITES: default + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: C:/python/Python313/python.exe + - name: test-win64-py3.13-noauth-nossl-sync + tasks: + - name: .sharded_cluster + display_name: Test Win64 py3.13 NoAuth NoSSL Sync + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: noauth + SSL: nossl + TEST_SUITES: default + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: C:/python/Python313/python.exe + - name: test-win64-py3.13-auth-ssl-async + tasks: + - name: .sharded_cluster + display_name: Test Win64 py3.13 Auth SSL Async + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: auth + SSL: ssl + TEST_SUITES: default_async + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: C:/python/Python313/python.exe + - name: test-win64-py3.13-noauth-ssl-async + tasks: + - name: .sharded_cluster + display_name: Test Win64 py3.13 NoAuth SSL Async + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: noauth + SSL: ssl + TEST_SUITES: default_async + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: C:/python/Python313/python.exe + - name: test-win64-py3.13-noauth-nossl-async + tasks: + - name: .sharded_cluster + display_name: Test Win64 py3.13 NoAuth NoSSL Async + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: noauth + SSL: nossl + TEST_SUITES: default_async + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: C:/python/Python313/python.exe + - name: test-win32-py3.9-auth-ssl-sync + tasks: + - name: .standalone + display_name: Test Win32 py3.9 Auth SSL Sync + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: auth + SSL: ssl + TEST_SUITES: default + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: C:/python/32/Python39/python.exe + - name: test-win32-py3.9-noauth-ssl-sync + tasks: + - name: .standalone + display_name: Test Win32 py3.9 NoAuth SSL Sync + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: noauth + SSL: ssl + TEST_SUITES: default + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: C:/python/32/Python39/python.exe + - name: test-win32-py3.9-noauth-nossl-sync + tasks: + - name: .standalone + display_name: Test Win32 py3.9 NoAuth NoSSL Sync + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: noauth + SSL: nossl + TEST_SUITES: default + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: C:/python/32/Python39/python.exe + - name: test-win32-py3.9-auth-ssl-async + tasks: + - name: .standalone + display_name: Test Win32 py3.9 Auth SSL Async + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: auth + SSL: ssl + TEST_SUITES: default_async + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: C:/python/32/Python39/python.exe + - name: test-win32-py3.9-noauth-ssl-async + tasks: + - name: .standalone + display_name: Test Win32 
py3.9 NoAuth SSL Async + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: noauth + SSL: ssl + TEST_SUITES: default_async + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: C:/python/32/Python39/python.exe + - name: test-win32-py3.9-noauth-nossl-async + tasks: + - name: .standalone + display_name: Test Win32 py3.9 NoAuth NoSSL Async + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: noauth + SSL: nossl + TEST_SUITES: default_async + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: C:/python/32/Python39/python.exe + - name: test-win32-py3.13-auth-ssl-sync + tasks: + - name: .sharded_cluster + display_name: Test Win32 py3.13 Auth SSL Sync + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: auth + SSL: ssl + TEST_SUITES: default + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: C:/python/32/Python313/python.exe + - name: test-win32-py3.13-noauth-ssl-sync + tasks: + - name: .sharded_cluster + display_name: Test Win32 py3.13 NoAuth SSL Sync + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: noauth + SSL: ssl + TEST_SUITES: default + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: C:/python/32/Python313/python.exe + - name: test-win32-py3.13-noauth-nossl-sync + tasks: + - name: .sharded_cluster + display_name: Test Win32 py3.13 NoAuth NoSSL Sync + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: noauth + SSL: nossl + TEST_SUITES: default + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: C:/python/32/Python313/python.exe + - name: test-win32-py3.13-auth-ssl-async + tasks: + - name: .sharded_cluster + display_name: Test Win32 py3.13 Auth SSL Async + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: auth + SSL: ssl + TEST_SUITES: default_async + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: C:/python/32/Python313/python.exe + - name: test-win32-py3.13-noauth-ssl-async + tasks: + - name: .sharded_cluster + display_name: Test Win32 py3.13 NoAuth SSL Async + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: noauth + SSL: ssl + TEST_SUITES: default_async + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: C:/python/32/Python313/python.exe + - name: test-win32-py3.13-noauth-nossl-async + tasks: + - name: .sharded_cluster + display_name: Test Win32 py3.13 NoAuth NoSSL Async + run_on: + - windows-64-vsMulti-small + expansions: + AUTH: noauth + SSL: nossl + TEST_SUITES: default_async + SKIP_CSOT_TESTS: "true" + PYTHON_BINARY: C:/python/32/Python313/python.exe + + # Serverless tests + - name: serverless-rhel8-py3.9 + tasks: + - name: serverless_task_group + display_name: Serverless RHEL8 py3.9 + run_on: + - rhel87-small + batchtime: 10080 + expansions: + test_serverless: "true" + AUTH: auth + SSL: ssl + PYTHON_BINARY: /opt/python/3.9/bin/python3 + - name: serverless-rhel8-py3.13 + tasks: + - name: serverless_task_group + display_name: Serverless RHEL8 py3.13 + run_on: + - rhel87-small + batchtime: 10080 + expansions: + test_serverless: "true" + AUTH: auth + SSL: ssl + PYTHON_BINARY: /opt/python/3.13/bin/python3 diff --git a/.evergreen/scripts/generate_config.py b/.evergreen/scripts/generate_config.py index 7adeac82e0..b65d9b62da 100644 --- a/.evergreen/scripts/generate_config.py +++ b/.evergreen/scripts/generate_config.py @@ -9,8 +9,11 @@ # Note: Run this file with `hatch run`, `pipx run`, or `uv run`. 
from __future__ import annotations +import sys from dataclasses import dataclass +from inspect import getmembers, isfunction from itertools import cycle, product, zip_longest +from pathlib import Path from typing import Any from shrub.v3.evg_build_variant import BuildVariant @@ -172,10 +175,10 @@ def handle_c_ext(c_ext, expansions): expansions["NO_EXT"] = "1" -def generate_yaml(tasks=None, variants=None): +def create_yaml(tasks=None, variants=None): """Generate the yaml for a given set of tasks and variants.""" project = EvgProject(tasks=tasks, buildvariants=variants) - out = ShrubService.generate_yaml(project) + out = ShrubService.create_yaml(project) # Dedent by two spaces to match what we use in config.yml lines = [line[2:] for line in out.splitlines()] print("\n".join(lines)) # noqa: T201 @@ -198,7 +201,7 @@ def create_ocsp_variants() -> list[BuildVariant]: host = "rhel8" variant = create_variant( [".ocsp"], - get_display_name(base_display, host, version, python), + get_display_name(base_display, host, version=version, python=python), python=python, version=version, host=host, @@ -213,7 +216,7 @@ def create_ocsp_variants() -> list[BuildVariant]: python = CPYTHONS[0] if version == "4.4" else CPYTHONS[-1] variant = create_variant( [".ocsp-rsa !.ocsp-staple"], - get_display_name(base_display, host, version, python), + get_display_name(base_display, host, version=version, python=python), python=python, version=version, host=host, @@ -540,7 +543,7 @@ def create_green_framework_variants(): return variants -def generate_no_c_ext_variants(): +def create_no_c_ext_variants(): variants = [] host = "rhel8" for python, topology in zip_cycle(CPYTHONS, TOPOLOGIES): @@ -555,7 +558,7 @@ def generate_no_c_ext_variants(): return variants -def generate_atlas_data_lake_variants(): +def create_atlas_data_lake_variants(): variants = [] host = "rhel8" for python, c_ext in product(MIN_MAX_PYTHON, C_EXTS): @@ -570,7 +573,7 @@ def generate_atlas_data_lake_variants(): return variants -def generate_mod_wsgi_variants(): +def create_mod_wsgi_variants(): variants = [] host = "ubuntu22" tasks = [ @@ -589,7 +592,7 @@ def generate_mod_wsgi_variants(): return variants -def generate_disable_test_commands_variants(): +def create_disable_test_commands_variants(): host = "rhel8" expansions = dict(AUTH="auth", SSL="ssl", DISABLE_TEST_COMMANDS="1") python = CPYTHONS[0] @@ -598,7 +601,7 @@ def generate_disable_test_commands_variants(): return [create_variant(tasks, display_name, host=host, python=python, expansions=expansions)] -def generate_serverless_variants(): +def create_serverless_variants(): host = "rhel8" batchtime = BATCHTIME_WEEK expansions = dict(test_serverless="true", AUTH="auth", SSL="ssl") @@ -617,7 +620,7 @@ def generate_serverless_variants(): ] -def generate_oidc_auth_variants(): +def create_oidc_auth_variants(): variants = [] for host in ["rhel8", "macos", "win64"]: variants.append( @@ -631,7 +634,7 @@ def generate_oidc_auth_variants(): return variants -def generate_search_index_variants(): +def create_search_index_variants(): host = "rhel8" python = CPYTHONS[0] return [ @@ -644,7 +647,7 @@ def generate_search_index_variants(): ] -def generate_mockupdb_variants(): +def create_mockupdb_variants(): host = "rhel8" python = CPYTHONS[0] return [ @@ -657,7 +660,7 @@ def generate_mockupdb_variants(): ] -def generate_doctests_variants(): +def create_doctests_variants(): host = "rhel8" python = CPYTHONS[0] return [ @@ -670,7 +673,7 @@ def generate_doctests_variants(): ] -def generate_atlas_connect_variants(): +def 
create_atlas_connect_variants(): host = "rhel8" return [ create_variant( @@ -683,7 +686,7 @@ def generate_atlas_connect_variants(): ] -def generate_aws_auth_variants(): +def create_aws_auth_variants(): variants = [] tasks = [ "aws-auth-test-4.4", @@ -713,7 +716,7 @@ def generate_aws_auth_variants(): return variants -def generate_alternative_hosts_variants(): +def create_alternative_hosts_variants(): base_expansions = dict(SKIP_HATCH="true") batchtime = BATCHTIME_WEEK variants = [] @@ -756,5 +759,35 @@ def generate_alternative_hosts_variants(): # Generate Config ################## -variants = generate_search_index_variants() -generate_yaml(variants=variants) + +def write_variants_to_file(): + mod = sys.modules[__name__] + here = Path(__file__).absolute().parent + target = here.parent / "generated_configs" / "variants.yml" + if target.exists(): + target.unlink() + with target.open("w") as fid: + fid.write("buildvariants:\n") + + for name, func in getmembers(mod, isfunction): + if not name.endswith("_variants"): + continue + if not name.startswith("create_"): + raise ValueError("Variant creators must start with create_") + title = name.replace("create_", "").replace("_variants", "").replace("_", " ").capitalize() + project = EvgProject(tasks=None, buildvariants=func()) + out = ShrubService.generate_yaml(project).splitlines() + with target.open("a") as fid: + fid.write(f" # {title} tests\n") + for line in out[1:]: + fid.write(f"{line}\n") + fid.write("\n") + + # Remove extra trailing newline: + data = target.read_text().splitlines() + with target.open("w") as fid: + for line in data[:-1]: + fid.write(f"{line}\n") + + +write_variants_to_file() From 215bca21ec2027e492eef07e89bc4d850eb01671 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Thu, 24 Oct 2024 10:30:30 -0500 Subject: [PATCH 070/182] PYTHON-4902 Use shrub.py to generate tasks (#1966) --- .evergreen/config.yml | 246 +- .evergreen/generated_configs/tasks.yml | 2882 +++++++++++++++++++++ .evergreen/generated_configs/variants.yml | 1202 ++------- .evergreen/scripts/generate_config.py | 173 +- 4 files changed, 3208 insertions(+), 1295 deletions(-) create mode 100644 .evergreen/generated_configs/tasks.yml diff --git a/.evergreen/config.yml b/.evergreen/config.yml index 6e48a380d3..a1587a281d 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -26,7 +26,8 @@ timeout: ls -la include: - - filename: .evergreen/generated_configs/variants.yml + - filename: .evergreen/generated_configs/tasks.yml + - filename: .evergreen/generated_configs/variants.yml functions: "fetch source": @@ -971,249 +972,6 @@ tasks: TOPOLOGY: "server" - func: "run doctests" - - name: "test-4.0-standalone" - tags: ["4.0", "standalone"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "4.0" - TOPOLOGY: "server" - - func: "run tests" - - - name: "test-4.0-replica_set" - tags: ["4.0", "replica_set"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "4.0" - TOPOLOGY: "replica_set" - - func: "run tests" - - - name: "test-4.0-sharded_cluster" - tags: ["4.0", "sharded_cluster"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "4.0" - TOPOLOGY: "sharded_cluster" - - func: "run tests" - - - name: "test-4.2-standalone" - tags: ["4.2", "standalone"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "4.2" - TOPOLOGY: "server" - - func: "run tests" - - - name: "test-4.2-replica_set" - tags: ["4.2", "replica_set"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - 
VERSION: "4.2" - TOPOLOGY: "replica_set" - - func: "run tests" - - - name: "test-4.2-sharded_cluster" - tags: ["4.2", "sharded_cluster"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "4.2" - TOPOLOGY: "sharded_cluster" - - func: "run tests" - - - name: "test-4.4-standalone" - tags: ["4.4", "standalone"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "4.4" - TOPOLOGY: "server" - - func: "run tests" - - - name: "test-4.4-replica_set" - tags: ["4.4", "replica_set"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "4.4" - TOPOLOGY: "replica_set" - - func: "run tests" - - - name: "test-4.4-sharded_cluster" - tags: ["4.4", "sharded_cluster"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "4.4" - TOPOLOGY: "sharded_cluster" - - func: "run tests" - - - name: "test-5.0-standalone" - tags: ["5.0", "standalone"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "5.0" - TOPOLOGY: "server" - - func: "run tests" - - - name: "test-5.0-replica_set" - tags: ["5.0", "replica_set"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "5.0" - TOPOLOGY: "replica_set" - - func: "run tests" - - - name: "test-5.0-sharded_cluster" - tags: ["5.0", "sharded_cluster"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "5.0" - TOPOLOGY: "sharded_cluster" - - func: "run tests" - - - name: "test-6.0-standalone" - tags: ["6.0", "standalone"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "6.0" - TOPOLOGY: "server" - - func: "run tests" - - - name: "test-6.0-replica_set" - tags: ["6.0", "replica_set"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "6.0" - TOPOLOGY: "replica_set" - - func: "run tests" - - - name: "test-6.0-sharded_cluster" - tags: ["6.0", "sharded_cluster"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "6.0" - TOPOLOGY: "sharded_cluster" - - func: "run tests" - - - name: "test-8.0-standalone" - tags: ["8.0", "standalone"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "8.0" - TOPOLOGY: "server" - - func: "run tests" - - - name: "test-8.0-replica_set" - tags: ["8.0", "replica_set"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "8.0" - TOPOLOGY: "replica_set" - - func: "run tests" - - - name: "test-8.0-sharded_cluster" - tags: ["8.0", "sharded_cluster"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "8.0" - TOPOLOGY: "sharded_cluster" - - func: "run tests" - - - name: "test-7.0-standalone" - tags: ["7.0", "standalone"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "7.0" - TOPOLOGY: "server" - - func: "run tests" - - - name: "test-7.0-replica_set" - tags: ["7.0", "replica_set"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "7.0" - TOPOLOGY: "replica_set" - - func: "run tests" - - - name: "test-7.0-sharded_cluster" - tags: ["7.0", "sharded_cluster"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "7.0" - TOPOLOGY: "sharded_cluster" - - func: "run tests" - - - name: "test-latest-standalone" - tags: ["latest", "standalone"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "latest" - TOPOLOGY: "server" - - func: "run tests" - - - name: "test-latest-replica_set" - tags: ["latest", "replica_set"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "latest" - TOPOLOGY: 
"replica_set" - - func: "run tests" - - - name: "test-latest-sharded_cluster" - tags: ["latest", "sharded_cluster"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "latest" - TOPOLOGY: "sharded_cluster" - - func: "run tests" - - - name: "test-rapid-standalone" - tags: ["rapid", "standalone"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "rapid" - TOPOLOGY: "server" - - func: "run tests" - - - name: "test-rapid-replica_set" - tags: ["rapid", "replica_set"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "rapid" - TOPOLOGY: "replica_set" - - func: "run tests" - - - name: "test-rapid-sharded_cluster" - tags: ["rapid", "sharded_cluster"] - commands: - - func: "bootstrap mongo-orchestration" - vars: - VERSION: "rapid" - TOPOLOGY: "sharded_cluster" - - func: "run tests" - - name: "test-serverless" tags: ["serverless"] commands: diff --git a/.evergreen/generated_configs/tasks.yml b/.evergreen/generated_configs/tasks.yml new file mode 100644 index 0000000000..fb3da4bb24 --- /dev/null +++ b/.evergreen/generated_configs/tasks.yml @@ -0,0 +1,2882 @@ +tasks: + # Server tests + - name: test-4.0-standalone-auth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.0" + TOPOLOGY: server + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - "4.0" + - standalone + - auth + - ssl + - sync + - name: test-4.0-standalone-auth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.0" + TOPOLOGY: server + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - "4.0" + - standalone + - auth + - ssl + - async + - name: test-4.0-standalone-noauth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.0" + TOPOLOGY: server + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - "4.0" + - standalone + - noauth + - ssl + - sync + - name: test-4.0-standalone-noauth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.0" + TOPOLOGY: server + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - "4.0" + - standalone + - noauth + - ssl + - async + - name: test-4.0-standalone-noauth-nossl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.0" + TOPOLOGY: server + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync + TEST_SUITES: default + tags: + - "4.0" + - standalone + - noauth + - nossl + - sync + - name: test-4.0-standalone-noauth-nossl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.0" + TOPOLOGY: server + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: async + TEST_SUITES: default_async + tags: + - "4.0" + - standalone + - noauth + - nossl + - async + - name: test-4.4-standalone-auth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.4" + TOPOLOGY: server + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - "4.4" + - standalone + - auth + - ssl + - sync + - name: test-4.4-standalone-auth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.4" + TOPOLOGY: server + 
AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - "4.4" + - standalone + - auth + - ssl + - async + - name: test-4.4-standalone-noauth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.4" + TOPOLOGY: server + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - "4.4" + - standalone + - noauth + - ssl + - sync + - name: test-4.4-standalone-noauth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.4" + TOPOLOGY: server + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - "4.4" + - standalone + - noauth + - ssl + - async + - name: test-4.4-standalone-noauth-nossl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.4" + TOPOLOGY: server + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync + TEST_SUITES: default + tags: + - "4.4" + - standalone + - noauth + - nossl + - sync + - name: test-4.4-standalone-noauth-nossl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.4" + TOPOLOGY: server + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: async + TEST_SUITES: default_async + tags: + - "4.4" + - standalone + - noauth + - nossl + - async + - name: test-5.0-standalone-auth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "5.0" + TOPOLOGY: server + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - "5.0" + - standalone + - auth + - ssl + - sync + - name: test-5.0-standalone-auth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "5.0" + TOPOLOGY: server + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - "5.0" + - standalone + - auth + - ssl + - async + - name: test-5.0-standalone-noauth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "5.0" + TOPOLOGY: server + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - "5.0" + - standalone + - noauth + - ssl + - sync + - name: test-5.0-standalone-noauth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "5.0" + TOPOLOGY: server + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - "5.0" + - standalone + - noauth + - ssl + - async + - name: test-5.0-standalone-noauth-nossl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "5.0" + TOPOLOGY: server + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync + TEST_SUITES: default + tags: + - "5.0" + - standalone + - noauth + - nossl + - sync + - name: test-5.0-standalone-noauth-nossl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "5.0" + TOPOLOGY: server + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: async + TEST_SUITES: default_async + tags: + - "5.0" + - standalone + - noauth + - nossl + - async + - name: test-6.0-standalone-auth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "6.0" + 
TOPOLOGY: server + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - "6.0" + - standalone + - auth + - ssl + - sync + - name: test-6.0-standalone-auth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "6.0" + TOPOLOGY: server + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - "6.0" + - standalone + - auth + - ssl + - async + - name: test-6.0-standalone-noauth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "6.0" + TOPOLOGY: server + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - "6.0" + - standalone + - noauth + - ssl + - sync + - name: test-6.0-standalone-noauth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "6.0" + TOPOLOGY: server + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - "6.0" + - standalone + - noauth + - ssl + - async + - name: test-6.0-standalone-noauth-nossl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "6.0" + TOPOLOGY: server + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync + TEST_SUITES: default + tags: + - "6.0" + - standalone + - noauth + - nossl + - sync + - name: test-6.0-standalone-noauth-nossl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "6.0" + TOPOLOGY: server + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: async + TEST_SUITES: default_async + tags: + - "6.0" + - standalone + - noauth + - nossl + - async + - name: test-7.0-standalone-auth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "7.0" + TOPOLOGY: server + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - "7.0" + - standalone + - auth + - ssl + - sync + - name: test-7.0-standalone-auth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "7.0" + TOPOLOGY: server + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - "7.0" + - standalone + - auth + - ssl + - async + - name: test-7.0-standalone-noauth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "7.0" + TOPOLOGY: server + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - "7.0" + - standalone + - noauth + - ssl + - sync + - name: test-7.0-standalone-noauth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "7.0" + TOPOLOGY: server + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - "7.0" + - standalone + - noauth + - ssl + - async + - name: test-7.0-standalone-noauth-nossl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "7.0" + TOPOLOGY: server + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync + TEST_SUITES: default + tags: + - "7.0" + - standalone + - noauth + - nossl + - sync + - name: test-7.0-standalone-noauth-nossl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "7.0" + 
TOPOLOGY: server + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: async + TEST_SUITES: default_async + tags: + - "7.0" + - standalone + - noauth + - nossl + - async + - name: test-8.0-standalone-auth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "8.0" + TOPOLOGY: server + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - "8.0" + - standalone + - auth + - ssl + - sync + - name: test-8.0-standalone-auth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "8.0" + TOPOLOGY: server + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - "8.0" + - standalone + - auth + - ssl + - async + - name: test-8.0-standalone-noauth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "8.0" + TOPOLOGY: server + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - "8.0" + - standalone + - noauth + - ssl + - sync + - name: test-8.0-standalone-noauth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "8.0" + TOPOLOGY: server + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - "8.0" + - standalone + - noauth + - ssl + - async + - name: test-8.0-standalone-noauth-nossl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "8.0" + TOPOLOGY: server + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync + TEST_SUITES: default + tags: + - "8.0" + - standalone + - noauth + - nossl + - sync + - name: test-8.0-standalone-noauth-nossl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "8.0" + TOPOLOGY: server + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: async + TEST_SUITES: default_async + tags: + - "8.0" + - standalone + - noauth + - nossl + - async + - name: test-rapid-standalone-auth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: rapid + TOPOLOGY: server + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - rapid + - standalone + - auth + - ssl + - sync + - name: test-rapid-standalone-auth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: rapid + TOPOLOGY: server + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - rapid + - standalone + - auth + - ssl + - async + - name: test-rapid-standalone-noauth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: rapid + TOPOLOGY: server + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - rapid + - standalone + - noauth + - ssl + - sync + - name: test-rapid-standalone-noauth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: rapid + TOPOLOGY: server + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - rapid + - standalone + - noauth + - ssl + - async + - name: test-rapid-standalone-noauth-nossl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + 
VERSION: rapid + TOPOLOGY: server + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync + TEST_SUITES: default + tags: + - rapid + - standalone + - noauth + - nossl + - sync + - name: test-rapid-standalone-noauth-nossl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: rapid + TOPOLOGY: server + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: async + TEST_SUITES: default_async + tags: + - rapid + - standalone + - noauth + - nossl + - async + - name: test-latest-standalone-auth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: latest + TOPOLOGY: server + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - latest + - standalone + - auth + - ssl + - sync + - name: test-latest-standalone-auth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: latest + TOPOLOGY: server + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - latest + - standalone + - auth + - ssl + - async + - name: test-latest-standalone-noauth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: latest + TOPOLOGY: server + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - latest + - standalone + - noauth + - ssl + - sync + - name: test-latest-standalone-noauth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: latest + TOPOLOGY: server + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - latest + - standalone + - noauth + - ssl + - async + - name: test-latest-standalone-noauth-nossl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: latest + TOPOLOGY: server + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync + TEST_SUITES: default + tags: + - latest + - standalone + - noauth + - nossl + - sync + - name: test-latest-standalone-noauth-nossl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: latest + TOPOLOGY: server + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: async + TEST_SUITES: default_async + tags: + - latest + - standalone + - noauth + - nossl + - async + - name: test-4.0-replica_set-auth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.0" + TOPOLOGY: replica_set + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - "4.0" + - replica_set + - auth + - ssl + - sync + - name: test-4.0-replica_set-auth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.0" + TOPOLOGY: replica_set + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - "4.0" + - replica_set + - auth + - ssl + - async + - name: test-4.0-replica_set-noauth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.0" + TOPOLOGY: replica_set + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - "4.0" + - replica_set + - noauth + - ssl + - sync + - name: test-4.0-replica_set-noauth-ssl-async + 
commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.0" + TOPOLOGY: replica_set + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - "4.0" + - replica_set + - noauth + - ssl + - async + - name: test-4.0-replica_set-noauth-nossl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.0" + TOPOLOGY: replica_set + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync + TEST_SUITES: default + tags: + - "4.0" + - replica_set + - noauth + - nossl + - sync + - name: test-4.0-replica_set-noauth-nossl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.0" + TOPOLOGY: replica_set + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: async + TEST_SUITES: default_async + tags: + - "4.0" + - replica_set + - noauth + - nossl + - async + - name: test-4.4-replica_set-auth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.4" + TOPOLOGY: replica_set + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - "4.4" + - replica_set + - auth + - ssl + - sync + - name: test-4.4-replica_set-auth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.4" + TOPOLOGY: replica_set + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - "4.4" + - replica_set + - auth + - ssl + - async + - name: test-4.4-replica_set-noauth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.4" + TOPOLOGY: replica_set + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - "4.4" + - replica_set + - noauth + - ssl + - sync + - name: test-4.4-replica_set-noauth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.4" + TOPOLOGY: replica_set + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - "4.4" + - replica_set + - noauth + - ssl + - async + - name: test-4.4-replica_set-noauth-nossl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.4" + TOPOLOGY: replica_set + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync + TEST_SUITES: default + tags: + - "4.4" + - replica_set + - noauth + - nossl + - sync + - name: test-4.4-replica_set-noauth-nossl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.4" + TOPOLOGY: replica_set + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: async + TEST_SUITES: default_async + tags: + - "4.4" + - replica_set + - noauth + - nossl + - async + - name: test-5.0-replica_set-auth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "5.0" + TOPOLOGY: replica_set + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - "5.0" + - replica_set + - auth + - ssl + - sync + - name: test-5.0-replica_set-auth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "5.0" + TOPOLOGY: replica_set + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - "5.0" + - 
replica_set + - auth + - ssl + - async + - name: test-5.0-replica_set-noauth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "5.0" + TOPOLOGY: replica_set + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - "5.0" + - replica_set + - noauth + - ssl + - sync + - name: test-5.0-replica_set-noauth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "5.0" + TOPOLOGY: replica_set + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - "5.0" + - replica_set + - noauth + - ssl + - async + - name: test-5.0-replica_set-noauth-nossl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "5.0" + TOPOLOGY: replica_set + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync + TEST_SUITES: default + tags: + - "5.0" + - replica_set + - noauth + - nossl + - sync + - name: test-5.0-replica_set-noauth-nossl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "5.0" + TOPOLOGY: replica_set + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: async + TEST_SUITES: default_async + tags: + - "5.0" + - replica_set + - noauth + - nossl + - async + - name: test-6.0-replica_set-auth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "6.0" + TOPOLOGY: replica_set + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - "6.0" + - replica_set + - auth + - ssl + - sync + - name: test-6.0-replica_set-auth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "6.0" + TOPOLOGY: replica_set + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - "6.0" + - replica_set + - auth + - ssl + - async + - name: test-6.0-replica_set-noauth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "6.0" + TOPOLOGY: replica_set + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - "6.0" + - replica_set + - noauth + - ssl + - sync + - name: test-6.0-replica_set-noauth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "6.0" + TOPOLOGY: replica_set + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - "6.0" + - replica_set + - noauth + - ssl + - async + - name: test-6.0-replica_set-noauth-nossl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "6.0" + TOPOLOGY: replica_set + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync + TEST_SUITES: default + tags: + - "6.0" + - replica_set + - noauth + - nossl + - sync + - name: test-6.0-replica_set-noauth-nossl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "6.0" + TOPOLOGY: replica_set + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: async + TEST_SUITES: default_async + tags: + - "6.0" + - replica_set + - noauth + - nossl + - async + - name: test-7.0-replica_set-auth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "7.0" + TOPOLOGY: replica_set + AUTH: auth + SSL: ssl + - func: run tests + 
vars: + AUTH: auth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - "7.0" + - replica_set + - auth + - ssl + - sync + - name: test-7.0-replica_set-auth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "7.0" + TOPOLOGY: replica_set + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - "7.0" + - replica_set + - auth + - ssl + - async + - name: test-7.0-replica_set-noauth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "7.0" + TOPOLOGY: replica_set + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - "7.0" + - replica_set + - noauth + - ssl + - sync + - name: test-7.0-replica_set-noauth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "7.0" + TOPOLOGY: replica_set + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - "7.0" + - replica_set + - noauth + - ssl + - async + - name: test-7.0-replica_set-noauth-nossl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "7.0" + TOPOLOGY: replica_set + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync + TEST_SUITES: default + tags: + - "7.0" + - replica_set + - noauth + - nossl + - sync + - name: test-7.0-replica_set-noauth-nossl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "7.0" + TOPOLOGY: replica_set + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: async + TEST_SUITES: default_async + tags: + - "7.0" + - replica_set + - noauth + - nossl + - async + - name: test-8.0-replica_set-auth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "8.0" + TOPOLOGY: replica_set + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - "8.0" + - replica_set + - auth + - ssl + - sync + - name: test-8.0-replica_set-auth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "8.0" + TOPOLOGY: replica_set + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - "8.0" + - replica_set + - auth + - ssl + - async + - name: test-8.0-replica_set-noauth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "8.0" + TOPOLOGY: replica_set + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - "8.0" + - replica_set + - noauth + - ssl + - sync + - name: test-8.0-replica_set-noauth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "8.0" + TOPOLOGY: replica_set + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - "8.0" + - replica_set + - noauth + - ssl + - async + - name: test-8.0-replica_set-noauth-nossl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "8.0" + TOPOLOGY: replica_set + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync + TEST_SUITES: default + tags: + - "8.0" + - replica_set + - noauth + - nossl + - sync + - name: test-8.0-replica_set-noauth-nossl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: 
"8.0" + TOPOLOGY: replica_set + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: async + TEST_SUITES: default_async + tags: + - "8.0" + - replica_set + - noauth + - nossl + - async + - name: test-rapid-replica_set-auth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: rapid + TOPOLOGY: replica_set + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - rapid + - replica_set + - auth + - ssl + - sync + - name: test-rapid-replica_set-auth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: rapid + TOPOLOGY: replica_set + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - rapid + - replica_set + - auth + - ssl + - async + - name: test-rapid-replica_set-noauth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: rapid + TOPOLOGY: replica_set + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - rapid + - replica_set + - noauth + - ssl + - sync + - name: test-rapid-replica_set-noauth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: rapid + TOPOLOGY: replica_set + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - rapid + - replica_set + - noauth + - ssl + - async + - name: test-rapid-replica_set-noauth-nossl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: rapid + TOPOLOGY: replica_set + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync + TEST_SUITES: default + tags: + - rapid + - replica_set + - noauth + - nossl + - sync + - name: test-rapid-replica_set-noauth-nossl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: rapid + TOPOLOGY: replica_set + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: async + TEST_SUITES: default_async + tags: + - rapid + - replica_set + - noauth + - nossl + - async + - name: test-latest-replica_set-auth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: latest + TOPOLOGY: replica_set + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - latest + - replica_set + - auth + - ssl + - sync + - name: test-latest-replica_set-auth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: latest + TOPOLOGY: replica_set + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - latest + - replica_set + - auth + - ssl + - async + - name: test-latest-replica_set-noauth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: latest + TOPOLOGY: replica_set + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - latest + - replica_set + - noauth + - ssl + - sync + - name: test-latest-replica_set-noauth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: latest + TOPOLOGY: replica_set + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - latest + - replica_set + - noauth + - ssl + - async + - name: 
test-latest-replica_set-noauth-nossl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: latest + TOPOLOGY: replica_set + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync + TEST_SUITES: default + tags: + - latest + - replica_set + - noauth + - nossl + - sync + - name: test-latest-replica_set-noauth-nossl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: latest + TOPOLOGY: replica_set + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: async + TEST_SUITES: default_async + tags: + - latest + - replica_set + - noauth + - nossl + - async + - name: test-4.0-sharded_cluster-auth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.0" + TOPOLOGY: sharded_cluster + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - "4.0" + - sharded_cluster + - auth + - ssl + - sync + - name: test-4.0-sharded_cluster-auth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.0" + TOPOLOGY: sharded_cluster + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - "4.0" + - sharded_cluster + - auth + - ssl + - async + - name: test-4.0-sharded_cluster-noauth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.0" + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - "4.0" + - sharded_cluster + - noauth + - ssl + - sync + - name: test-4.0-sharded_cluster-noauth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.0" + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - "4.0" + - sharded_cluster + - noauth + - ssl + - async + - name: test-4.0-sharded_cluster-noauth-nossl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.0" + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync + TEST_SUITES: default + tags: + - "4.0" + - sharded_cluster + - noauth + - nossl + - sync + - name: test-4.0-sharded_cluster-noauth-nossl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.0" + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: async + TEST_SUITES: default_async + tags: + - "4.0" + - sharded_cluster + - noauth + - nossl + - async + - name: test-4.4-sharded_cluster-auth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.4" + TOPOLOGY: sharded_cluster + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - "4.4" + - sharded_cluster + - auth + - ssl + - sync + - name: test-4.4-sharded_cluster-auth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.4" + TOPOLOGY: sharded_cluster + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - "4.4" + - sharded_cluster + - auth + - ssl + - async + - name: test-4.4-sharded_cluster-noauth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.4" + TOPOLOGY: 
sharded_cluster + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - "4.4" + - sharded_cluster + - noauth + - ssl + - sync + - name: test-4.4-sharded_cluster-noauth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.4" + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - "4.4" + - sharded_cluster + - noauth + - ssl + - async + - name: test-4.4-sharded_cluster-noauth-nossl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.4" + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync + TEST_SUITES: default + tags: + - "4.4" + - sharded_cluster + - noauth + - nossl + - sync + - name: test-4.4-sharded_cluster-noauth-nossl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.4" + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: async + TEST_SUITES: default_async + tags: + - "4.4" + - sharded_cluster + - noauth + - nossl + - async + - name: test-5.0-sharded_cluster-auth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "5.0" + TOPOLOGY: sharded_cluster + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - "5.0" + - sharded_cluster + - auth + - ssl + - sync + - name: test-5.0-sharded_cluster-auth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "5.0" + TOPOLOGY: sharded_cluster + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - "5.0" + - sharded_cluster + - auth + - ssl + - async + - name: test-5.0-sharded_cluster-noauth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "5.0" + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - "5.0" + - sharded_cluster + - noauth + - ssl + - sync + - name: test-5.0-sharded_cluster-noauth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "5.0" + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - "5.0" + - sharded_cluster + - noauth + - ssl + - async + - name: test-5.0-sharded_cluster-noauth-nossl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "5.0" + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync + TEST_SUITES: default + tags: + - "5.0" + - sharded_cluster + - noauth + - nossl + - sync + - name: test-5.0-sharded_cluster-noauth-nossl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "5.0" + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: async + TEST_SUITES: default_async + tags: + - "5.0" + - sharded_cluster + - noauth + - nossl + - async + - name: test-6.0-sharded_cluster-auth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "6.0" + TOPOLOGY: sharded_cluster + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync + 
TEST_SUITES: default + tags: + - "6.0" + - sharded_cluster + - auth + - ssl + - sync + - name: test-6.0-sharded_cluster-auth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "6.0" + TOPOLOGY: sharded_cluster + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - "6.0" + - sharded_cluster + - auth + - ssl + - async + - name: test-6.0-sharded_cluster-noauth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "6.0" + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - "6.0" + - sharded_cluster + - noauth + - ssl + - sync + - name: test-6.0-sharded_cluster-noauth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "6.0" + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - "6.0" + - sharded_cluster + - noauth + - ssl + - async + - name: test-6.0-sharded_cluster-noauth-nossl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "6.0" + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync + TEST_SUITES: default + tags: + - "6.0" + - sharded_cluster + - noauth + - nossl + - sync + - name: test-6.0-sharded_cluster-noauth-nossl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "6.0" + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: async + TEST_SUITES: default_async + tags: + - "6.0" + - sharded_cluster + - noauth + - nossl + - async + - name: test-7.0-sharded_cluster-auth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "7.0" + TOPOLOGY: sharded_cluster + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - "7.0" + - sharded_cluster + - auth + - ssl + - sync + - name: test-7.0-sharded_cluster-auth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "7.0" + TOPOLOGY: sharded_cluster + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - "7.0" + - sharded_cluster + - auth + - ssl + - async + - name: test-7.0-sharded_cluster-noauth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "7.0" + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - "7.0" + - sharded_cluster + - noauth + - ssl + - sync + - name: test-7.0-sharded_cluster-noauth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "7.0" + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - "7.0" + - sharded_cluster + - noauth + - ssl + - async + - name: test-7.0-sharded_cluster-noauth-nossl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "7.0" + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync + TEST_SUITES: default + tags: + - "7.0" + - sharded_cluster + - noauth + - nossl + - sync + - name: 
test-7.0-sharded_cluster-noauth-nossl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "7.0" + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: async + TEST_SUITES: default_async + tags: + - "7.0" + - sharded_cluster + - noauth + - nossl + - async + - name: test-8.0-sharded_cluster-auth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "8.0" + TOPOLOGY: sharded_cluster + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - "8.0" + - sharded_cluster + - auth + - ssl + - sync + - name: test-8.0-sharded_cluster-auth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "8.0" + TOPOLOGY: sharded_cluster + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - "8.0" + - sharded_cluster + - auth + - ssl + - async + - name: test-8.0-sharded_cluster-noauth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "8.0" + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - "8.0" + - sharded_cluster + - noauth + - ssl + - sync + - name: test-8.0-sharded_cluster-noauth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "8.0" + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - "8.0" + - sharded_cluster + - noauth + - ssl + - async + - name: test-8.0-sharded_cluster-noauth-nossl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "8.0" + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync + TEST_SUITES: default + tags: + - "8.0" + - sharded_cluster + - noauth + - nossl + - sync + - name: test-8.0-sharded_cluster-noauth-nossl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "8.0" + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: async + TEST_SUITES: default_async + tags: + - "8.0" + - sharded_cluster + - noauth + - nossl + - async + - name: test-rapid-sharded_cluster-auth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: rapid + TOPOLOGY: sharded_cluster + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - rapid + - sharded_cluster + - auth + - ssl + - sync + - name: test-rapid-sharded_cluster-auth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: rapid + TOPOLOGY: sharded_cluster + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - rapid + - sharded_cluster + - auth + - ssl + - async + - name: test-rapid-sharded_cluster-noauth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: rapid + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - rapid + - sharded_cluster + - noauth + - ssl + - sync + - name: test-rapid-sharded_cluster-noauth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: rapid + 
TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - rapid + - sharded_cluster + - noauth + - ssl + - async + - name: test-rapid-sharded_cluster-noauth-nossl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: rapid + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync + TEST_SUITES: default + tags: + - rapid + - sharded_cluster + - noauth + - nossl + - sync + - name: test-rapid-sharded_cluster-noauth-nossl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: rapid + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: async + TEST_SUITES: default_async + tags: + - rapid + - sharded_cluster + - noauth + - nossl + - async + - name: test-latest-sharded_cluster-auth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: latest + TOPOLOGY: sharded_cluster + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - latest + - sharded_cluster + - auth + - ssl + - sync + - name: test-latest-sharded_cluster-auth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: latest + TOPOLOGY: sharded_cluster + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - latest + - sharded_cluster + - auth + - ssl + - async + - name: test-latest-sharded_cluster-noauth-ssl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: latest + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync + TEST_SUITES: default + tags: + - latest + - sharded_cluster + - noauth + - ssl + - sync + - name: test-latest-sharded_cluster-noauth-ssl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: latest + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: async + TEST_SUITES: default_async + tags: + - latest + - sharded_cluster + - noauth + - ssl + - async + - name: test-latest-sharded_cluster-noauth-nossl-sync + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: latest + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync + TEST_SUITES: default + tags: + - latest + - sharded_cluster + - noauth + - nossl + - sync + - name: test-latest-sharded_cluster-noauth-nossl-async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: latest + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: async + TEST_SUITES: default_async + tags: + - latest + - sharded_cluster + - noauth + - nossl + - async diff --git a/.evergreen/generated_configs/variants.yml b/.evergreen/generated_configs/variants.yml index 52f8c673b3..327becc249 100644 --- a/.evergreen/generated_configs/variants.yml +++ b/.evergreen/generated_configs/variants.yml @@ -1,173 +1,51 @@ buildvariants: # Alternative hosts tests - - name: openssl-1.0.2-rhel7-py3.9-auth-ssl + - name: openssl-1.0.2-rhel7-py3.9 tasks: - name: .5.0 .standalone - display_name: OpenSSL 1.0.2 RHEL7 py3.9 Auth SSL + display_name: OpenSSL 1.0.2 RHEL7 py3.9 run_on: - rhel79-small batchtime: 10080 expansions: 
SKIP_HATCH: "true" - AUTH: auth - SSL: ssl - PYTHON_BINARY: /opt/python/3.9/bin/python3 - - name: openssl-1.0.2-rhel7-py3.9-noauth-ssl - tasks: - - name: .5.0 .standalone - display_name: OpenSSL 1.0.2 RHEL7 py3.9 NoAuth SSL - run_on: - - rhel79-small - batchtime: 10080 - expansions: - SKIP_HATCH: "true" - AUTH: noauth - SSL: ssl - PYTHON_BINARY: /opt/python/3.9/bin/python3 - - name: openssl-1.0.2-rhel7-py3.9-noauth-nossl - tasks: - - name: .5.0 .standalone - display_name: OpenSSL 1.0.2 RHEL7 py3.9 NoAuth NoSSL - run_on: - - rhel79-small - batchtime: 10080 - expansions: - SKIP_HATCH: "true" - AUTH: noauth - SSL: nossl PYTHON_BINARY: /opt/python/3.9/bin/python3 - - name: other-hosts-rhel9-fips-auth-ssl - tasks: - - name: .6.0 .standalone - display_name: Other hosts RHEL9-FIPS Auth SSL - run_on: - - rhel92-fips - batchtime: 10080 - expansions: - SKIP_HATCH: "true" - AUTH: auth - SSL: ssl - - name: other-hosts-rhel9-fips-noauth-ssl - tasks: - - name: .6.0 .standalone - display_name: Other hosts RHEL9-FIPS NoAuth SSL - run_on: - - rhel92-fips - batchtime: 10080 - expansions: - SKIP_HATCH: "true" - AUTH: noauth - SSL: ssl - - name: other-hosts-rhel9-fips-noauth-nossl + - name: other-hosts-rhel9-fips tasks: - name: .6.0 .standalone - display_name: Other hosts RHEL9-FIPS NoAuth NoSSL + display_name: Other hosts RHEL9-FIPS run_on: - rhel92-fips batchtime: 10080 expansions: SKIP_HATCH: "true" - AUTH: noauth - SSL: nossl - - name: other-hosts-rhel8-zseries-auth-ssl - tasks: - - name: .6.0 .standalone - display_name: Other hosts RHEL8-zseries Auth SSL - run_on: - - rhel8-zseries-small - batchtime: 10080 - expansions: - SKIP_HATCH: "true" - AUTH: auth - SSL: ssl - - name: other-hosts-rhel8-zseries-noauth-ssl - tasks: - - name: .6.0 .standalone - display_name: Other hosts RHEL8-zseries NoAuth SSL - run_on: - - rhel8-zseries-small - batchtime: 10080 - expansions: - SKIP_HATCH: "true" - AUTH: noauth - SSL: ssl - - name: other-hosts-rhel8-zseries-noauth-nossl + - name: other-hosts-rhel8-zseries tasks: - name: .6.0 .standalone - display_name: Other hosts RHEL8-zseries NoAuth NoSSL + display_name: Other hosts RHEL8-zseries run_on: - rhel8-zseries-small batchtime: 10080 expansions: SKIP_HATCH: "true" - AUTH: noauth - SSL: nossl - - name: other-hosts-rhel8-power8-auth-ssl - tasks: - - name: .6.0 .standalone - display_name: Other hosts RHEL8-POWER8 Auth SSL - run_on: - - rhel8-power-small - batchtime: 10080 - expansions: - SKIP_HATCH: "true" - AUTH: auth - SSL: ssl - - name: other-hosts-rhel8-power8-noauth-ssl - tasks: - - name: .6.0 .standalone - display_name: Other hosts RHEL8-POWER8 NoAuth SSL - run_on: - - rhel8-power-small - batchtime: 10080 - expansions: - SKIP_HATCH: "true" - AUTH: noauth - SSL: ssl - - name: other-hosts-rhel8-power8-noauth-nossl + - name: other-hosts-rhel8-power8 tasks: - name: .6.0 .standalone - display_name: Other hosts RHEL8-POWER8 NoAuth NoSSL + display_name: Other hosts RHEL8-POWER8 run_on: - rhel8-power-small batchtime: 10080 expansions: SKIP_HATCH: "true" - AUTH: noauth - SSL: nossl - - name: other-hosts-rhel8-arm64-auth-ssl - tasks: - - name: .6.0 .standalone - display_name: Other hosts RHEL8-arm64 Auth SSL - run_on: - - rhel82-arm64-small - batchtime: 10080 - expansions: - SKIP_HATCH: "true" - AUTH: auth - SSL: ssl - - name: other-hosts-rhel8-arm64-noauth-ssl - tasks: - - name: .6.0 .standalone - display_name: Other hosts RHEL8-arm64 NoAuth SSL - run_on: - - rhel82-arm64-small - batchtime: 10080 - expansions: - SKIP_HATCH: "true" - AUTH: noauth - SSL: ssl - - name: 
other-hosts-rhel8-arm64-noauth-nossl + - name: other-hosts-rhel8-arm64 tasks: - name: .6.0 .standalone - display_name: Other hosts RHEL8-arm64 NoAuth NoSSL + display_name: Other hosts RHEL8-arm64 run_on: - rhel82-arm64-small batchtime: 10080 expansions: SKIP_HATCH: "true" - AUTH: noauth - SSL: nossl # Atlas connect tests - name: atlas-connect-rhel8-py3.9 @@ -320,7 +198,7 @@ buildvariants: # Compression tests - name: snappy-compression-rhel8-py3.9-no-c tasks: - - name: .standalone + - name: .standalone .noauth .nossl display_name: snappy compression RHEL8 py3.9 No C run_on: - rhel87-small @@ -330,7 +208,7 @@ buildvariants: PYTHON_BINARY: /opt/python/3.9/bin/python3 - name: snappy-compression-rhel8-py3.10 tasks: - - name: .standalone + - name: .standalone .noauth .nossl display_name: snappy compression RHEL8 py3.10 run_on: - rhel87-small @@ -339,7 +217,7 @@ buildvariants: PYTHON_BINARY: /opt/python/3.10/bin/python3 - name: zlib-compression-rhel8-py3.11-no-c tasks: - - name: .standalone + - name: .standalone .noauth .nossl display_name: zlib compression RHEL8 py3.11 No C run_on: - rhel87-small @@ -349,7 +227,7 @@ buildvariants: PYTHON_BINARY: /opt/python/3.11/bin/python3 - name: zlib-compression-rhel8-py3.12 tasks: - - name: .standalone + - name: .standalone .noauth .nossl display_name: zlib compression RHEL8 py3.12 run_on: - rhel87-small @@ -358,7 +236,7 @@ buildvariants: PYTHON_BINARY: /opt/python/3.12/bin/python3 - name: zstd-compression-rhel8-py3.13-no-c tasks: - - name: .standalone !.4.0 + - name: .standalone .noauth .nossl !.4.0 display_name: zstd compression RHEL8 py3.13 No C run_on: - rhel87-small @@ -368,7 +246,7 @@ buildvariants: PYTHON_BINARY: /opt/python/3.13/bin/python3 - name: zstd-compression-rhel8-py3.9 tasks: - - name: .standalone !.4.0 + - name: .standalone .noauth .nossl !.4.0 display_name: zstd compression RHEL8 py3.9 run_on: - rhel87-small @@ -377,7 +255,7 @@ buildvariants: PYTHON_BINARY: /opt/python/3.9/bin/python3 - name: snappy-compression-rhel8-pypy3.9 tasks: - - name: .standalone + - name: .standalone .noauth .nossl display_name: snappy compression RHEL8 pypy3.9 run_on: - rhel87-small @@ -386,7 +264,7 @@ buildvariants: PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 - name: zlib-compression-rhel8-pypy3.10 tasks: - - name: .standalone + - name: .standalone .noauth .nossl display_name: zlib compression RHEL8 pypy3.10 run_on: - rhel87-small @@ -395,7 +273,7 @@ buildvariants: PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 - name: zstd-compression-rhel8-pypy3.9 tasks: - - name: .standalone !.4.0 + - name: .standalone .noauth .nossl !.4.0 display_name: zstd compression RHEL8 pypy3.9 run_on: - rhel87-small @@ -427,297 +305,255 @@ buildvariants: PYTHON_BINARY: /opt/python/3.9/bin/python3 # Encryption tests - - name: encryption-rhel8-py3.9-auth-ssl + - name: encryption-rhel8-py3.9 tasks: - - name: .standalone - - name: .replica_set - - name: .sharded_cluster - display_name: Encryption RHEL8 py3.9 Auth SSL + - name: .sharded_cluster .auth .ssl + - name: .replica_set .noauth .ssl + - name: .standalone .noauth .nossl + display_name: Encryption RHEL8 py3.9 run_on: - rhel87-small batchtime: 10080 expansions: - AUTH: auth - SSL: ssl test_encryption: "true" PYTHON_BINARY: /opt/python/3.9/bin/python3 tags: [encryption_tag] - - name: encryption-rhel8-py3.13-auth-ssl + - name: encryption-rhel8-py3.13 tasks: - - name: .standalone - - name: .replica_set - - name: .sharded_cluster - display_name: Encryption RHEL8 py3.13 Auth SSL + - name: .sharded_cluster .auth .ssl + - name: .replica_set 
.noauth .ssl + - name: .standalone .noauth .nossl + display_name: Encryption RHEL8 py3.13 run_on: - rhel87-small batchtime: 10080 expansions: - AUTH: auth - SSL: ssl test_encryption: "true" PYTHON_BINARY: /opt/python/3.13/bin/python3 tags: [encryption_tag] - - name: encryption-rhel8-pypy3.10-auth-ssl + - name: encryption-rhel8-pypy3.10 tasks: - - name: .standalone - - name: .replica_set - - name: .sharded_cluster - display_name: Encryption RHEL8 pypy3.10 Auth SSL + - name: .sharded_cluster .auth .ssl + - name: .replica_set .noauth .ssl + - name: .standalone .noauth .nossl + display_name: Encryption RHEL8 pypy3.10 run_on: - rhel87-small batchtime: 10080 expansions: - AUTH: auth - SSL: ssl test_encryption: "true" PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 tags: [encryption_tag] - - name: encryption-crypt_shared-rhel8-py3.9-auth-ssl + - name: encryption-crypt_shared-rhel8-py3.9 tasks: - - name: .standalone - - name: .replica_set - - name: .sharded_cluster - display_name: Encryption crypt_shared RHEL8 py3.9 Auth SSL + - name: .sharded_cluster .auth .ssl + - name: .replica_set .noauth .ssl + - name: .standalone .noauth .nossl + display_name: Encryption crypt_shared RHEL8 py3.9 run_on: - rhel87-small batchtime: 10080 expansions: - AUTH: auth - SSL: ssl test_encryption: "true" test_crypt_shared: "true" PYTHON_BINARY: /opt/python/3.9/bin/python3 tags: [encryption_tag] - - name: encryption-crypt_shared-rhel8-py3.13-auth-ssl + - name: encryption-crypt_shared-rhel8-py3.13 tasks: - - name: .standalone - - name: .replica_set - - name: .sharded_cluster - display_name: Encryption crypt_shared RHEL8 py3.13 Auth SSL + - name: .sharded_cluster .auth .ssl + - name: .replica_set .noauth .ssl + - name: .standalone .noauth .nossl + display_name: Encryption crypt_shared RHEL8 py3.13 run_on: - rhel87-small batchtime: 10080 expansions: - AUTH: auth - SSL: ssl test_encryption: "true" test_crypt_shared: "true" PYTHON_BINARY: /opt/python/3.13/bin/python3 tags: [encryption_tag] - - name: encryption-crypt_shared-rhel8-pypy3.10-auth-ssl + - name: encryption-crypt_shared-rhel8-pypy3.10 tasks: - - name: .standalone - - name: .replica_set - - name: .sharded_cluster - display_name: Encryption crypt_shared RHEL8 pypy3.10 Auth SSL + - name: .sharded_cluster .auth .ssl + - name: .replica_set .noauth .ssl + - name: .standalone .noauth .nossl + display_name: Encryption crypt_shared RHEL8 pypy3.10 run_on: - rhel87-small batchtime: 10080 expansions: - AUTH: auth - SSL: ssl test_encryption: "true" test_crypt_shared: "true" PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 tags: [encryption_tag] - - name: encryption-pyopenssl-rhel8-py3.9-auth-ssl + - name: encryption-pyopenssl-rhel8-py3.9 tasks: - - name: .standalone - - name: .replica_set - - name: .sharded_cluster - display_name: Encryption PyOpenSSL RHEL8 py3.9 Auth SSL + - name: .sharded_cluster .auth .ssl + - name: .replica_set .noauth .ssl + - name: .standalone .noauth .nossl + display_name: Encryption PyOpenSSL RHEL8 py3.9 run_on: - rhel87-small batchtime: 10080 expansions: - AUTH: auth - SSL: ssl test_encryption: "true" test_encryption_pyopenssl: "true" PYTHON_BINARY: /opt/python/3.9/bin/python3 tags: [encryption_tag] - - name: encryption-pyopenssl-rhel8-py3.13-auth-ssl + - name: encryption-pyopenssl-rhel8-py3.13 tasks: - - name: .standalone - - name: .replica_set - - name: .sharded_cluster - display_name: Encryption PyOpenSSL RHEL8 py3.13 Auth SSL + - name: .sharded_cluster .auth .ssl + - name: .replica_set .noauth .ssl + - name: .standalone .noauth .nossl + 
display_name: Encryption PyOpenSSL RHEL8 py3.13 run_on: - rhel87-small batchtime: 10080 expansions: - AUTH: auth - SSL: ssl test_encryption: "true" test_encryption_pyopenssl: "true" PYTHON_BINARY: /opt/python/3.13/bin/python3 tags: [encryption_tag] - - name: encryption-pyopenssl-rhel8-pypy3.10-auth-ssl + - name: encryption-pyopenssl-rhel8-pypy3.10 tasks: - - name: .standalone - - name: .replica_set - - name: .sharded_cluster - display_name: Encryption PyOpenSSL RHEL8 pypy3.10 Auth SSL + - name: .sharded_cluster .auth .ssl + - name: .replica_set .noauth .ssl + - name: .standalone .noauth .nossl + display_name: Encryption PyOpenSSL RHEL8 pypy3.10 run_on: - rhel87-small batchtime: 10080 expansions: - AUTH: auth - SSL: ssl test_encryption: "true" test_encryption_pyopenssl: "true" PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 tags: [encryption_tag] - - name: encryption-rhel8-py3.10-auth-ssl + - name: encryption-rhel8-py3.10 tasks: - - name: .replica_set - display_name: Encryption RHEL8 py3.10 Auth SSL + - name: .sharded_cluster .auth .ssl + display_name: Encryption RHEL8 py3.10 run_on: - rhel87-small expansions: - AUTH: auth - SSL: ssl test_encryption: "true" PYTHON_BINARY: /opt/python/3.10/bin/python3 - - name: encryption-crypt_shared-rhel8-py3.11-auth-nossl + - name: encryption-crypt_shared-rhel8-py3.11 tasks: - - name: .replica_set - display_name: Encryption crypt_shared RHEL8 py3.11 Auth NoSSL + - name: .replica_set .noauth .ssl + display_name: Encryption crypt_shared RHEL8 py3.11 run_on: - rhel87-small expansions: - AUTH: auth - SSL: nossl test_encryption: "true" test_crypt_shared: "true" PYTHON_BINARY: /opt/python/3.11/bin/python3 - - name: encryption-pyopenssl-rhel8-py3.12-auth-ssl + - name: encryption-pyopenssl-rhel8-py3.12 tasks: - - name: .replica_set - display_name: Encryption PyOpenSSL RHEL8 py3.12 Auth SSL + - name: .standalone .noauth .nossl + display_name: Encryption PyOpenSSL RHEL8 py3.12 run_on: - rhel87-small expansions: - AUTH: auth - SSL: ssl test_encryption: "true" test_encryption_pyopenssl: "true" PYTHON_BINARY: /opt/python/3.12/bin/python3 - - name: encryption-rhel8-pypy3.9-auth-nossl + - name: encryption-rhel8-pypy3.9 tasks: - - name: .replica_set - display_name: Encryption RHEL8 pypy3.9 Auth NoSSL + - name: .sharded_cluster .auth .ssl + display_name: Encryption RHEL8 pypy3.9 run_on: - rhel87-small expansions: - AUTH: auth - SSL: nossl test_encryption: "true" PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 - - name: encryption-macos-py3.9-auth-ssl + - name: encryption-macos-py3.9 tasks: - name: .latest .replica_set - display_name: Encryption macOS py3.9 Auth SSL + display_name: Encryption macOS py3.9 run_on: - macos-14 batchtime: 10080 expansions: - AUTH: auth - SSL: ssl test_encryption: "true" PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 tags: [encryption_tag] - - name: encryption-macos-py3.13-auth-nossl + - name: encryption-macos-py3.13 tasks: - name: .latest .replica_set - display_name: Encryption macOS py3.13 Auth NoSSL + display_name: Encryption macOS py3.13 run_on: - macos-14 batchtime: 10080 expansions: - AUTH: auth - SSL: nossl test_encryption: "true" PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 tags: [encryption_tag] - - name: encryption-crypt_shared-macos-py3.9-auth-ssl + - name: encryption-crypt_shared-macos-py3.9 tasks: - name: .latest .replica_set - display_name: Encryption crypt_shared macOS py3.9 Auth SSL + display_name: Encryption crypt_shared macOS py3.9 run_on: - macos-14 batchtime: 10080 
expansions: - AUTH: auth - SSL: ssl test_encryption: "true" test_crypt_shared: "true" PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 tags: [encryption_tag] - - name: encryption-crypt_shared-macos-py3.13-auth-nossl + - name: encryption-crypt_shared-macos-py3.13 tasks: - name: .latest .replica_set - display_name: Encryption crypt_shared macOS py3.13 Auth NoSSL + display_name: Encryption crypt_shared macOS py3.13 run_on: - macos-14 batchtime: 10080 expansions: - AUTH: auth - SSL: nossl test_encryption: "true" test_crypt_shared: "true" PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 tags: [encryption_tag] - - name: encryption-win64-py3.9-auth-ssl + - name: encryption-win64-py3.9 tasks: - name: .latest .replica_set - display_name: Encryption Win64 py3.9 Auth SSL + display_name: Encryption Win64 py3.9 run_on: - windows-64-vsMulti-small batchtime: 10080 expansions: - AUTH: auth - SSL: ssl test_encryption: "true" PYTHON_BINARY: C:/python/Python39/python.exe tags: [encryption_tag] - - name: encryption-win64-py3.13-auth-nossl + - name: encryption-win64-py3.13 tasks: - name: .latest .replica_set - display_name: Encryption Win64 py3.13 Auth NoSSL + display_name: Encryption Win64 py3.13 run_on: - windows-64-vsMulti-small batchtime: 10080 expansions: - AUTH: auth - SSL: nossl test_encryption: "true" PYTHON_BINARY: C:/python/Python313/python.exe tags: [encryption_tag] - - name: encryption-crypt_shared-win64-py3.9-auth-ssl + - name: encryption-crypt_shared-win64-py3.9 tasks: - name: .latest .replica_set - display_name: Encryption crypt_shared Win64 py3.9 Auth SSL + display_name: Encryption crypt_shared Win64 py3.9 run_on: - windows-64-vsMulti-small batchtime: 10080 expansions: - AUTH: auth - SSL: ssl test_encryption: "true" test_crypt_shared: "true" PYTHON_BINARY: C:/python/Python39/python.exe tags: [encryption_tag] - - name: encryption-crypt_shared-win64-py3.13-auth-nossl + - name: encryption-crypt_shared-win64-py3.13 tasks: - name: .latest .replica_set - display_name: Encryption crypt_shared Win64 py3.13 Auth NoSSL + display_name: Encryption crypt_shared Win64 py3.13 run_on: - windows-64-vsMulti-small batchtime: 10080 expansions: - AUTH: auth - SSL: nossl test_encryption: "true" test_crypt_shared: "true" PYTHON_BINARY: C:/python/Python313/python.exe @@ -791,7 +627,7 @@ buildvariants: # Green framework tests - name: eventlet-rhel8-py3.9 tasks: - - name: .standalone + - name: .standalone .noauth .nossl display_name: Eventlet RHEL8 py3.9 run_on: - rhel87-small @@ -802,7 +638,7 @@ buildvariants: PYTHON_BINARY: /opt/python/3.9/bin/python3 - name: gevent-rhel8-py3.9 tasks: - - name: .standalone + - name: .standalone .noauth .nossl display_name: Gevent RHEL8 py3.9 run_on: - rhel87-small @@ -813,7 +649,7 @@ buildvariants: PYTHON_BINARY: /opt/python/3.9/bin/python3 - name: eventlet-rhel8-py3.12 tasks: - - name: .standalone + - name: .standalone .noauth .nossl display_name: Eventlet RHEL8 py3.12 run_on: - rhel87-small @@ -824,7 +660,7 @@ buildvariants: PYTHON_BINARY: /opt/python/3.12/bin/python3 - name: gevent-rhel8-py3.12 tasks: - - name: .standalone + - name: .standalone .noauth .nossl display_name: Gevent RHEL8 py3.12 run_on: - rhel87-small @@ -1070,7 +906,7 @@ buildvariants: # No c ext tests - name: no-c-ext-rhel8-py3.9 tasks: - - name: .standalone + - name: .standalone .noauth .nossl display_name: No C Ext RHEL8 py3.9 run_on: - rhel87-small @@ -1079,7 +915,7 @@ buildvariants: PYTHON_BINARY: /opt/python/3.9/bin/python3 - name: no-c-ext-rhel8-py3.10 
tasks: - - name: .replica_set + - name: .replica_set .noauth .nossl display_name: No C Ext RHEL8 py3.10 run_on: - rhel87-small @@ -1088,7 +924,7 @@ buildvariants: PYTHON_BINARY: /opt/python/3.10/bin/python3 - name: no-c-ext-rhel8-py3.11 tasks: - - name: .sharded_cluster + - name: .sharded_cluster .noauth .nossl display_name: No C Ext RHEL8 py3.11 run_on: - rhel87-small @@ -1097,7 +933,7 @@ buildvariants: PYTHON_BINARY: /opt/python/3.11/bin/python3 - name: no-c-ext-rhel8-py3.12 tasks: - - name: .standalone + - name: .standalone .noauth .nossl display_name: No C Ext RHEL8 py3.12 run_on: - rhel87-small @@ -1106,7 +942,7 @@ buildvariants: PYTHON_BINARY: /opt/python/3.12/bin/python3 - name: no-c-ext-rhel8-py3.13 tasks: - - name: .replica_set + - name: .replica_set .noauth .nossl display_name: No C Ext RHEL8 py3.13 run_on: - rhel87-small @@ -1285,94 +1121,80 @@ buildvariants: # Pyopenssl tests - name: pyopenssl-macos-py3.9 tasks: - - name: .replica_set - - name: .7.0 + - name: .replica_set .noauth .nossl + - name: .7.0 .noauth .nossl display_name: PyOpenSSL macOS py3.9 run_on: - macos-14 batchtime: 10080 expansions: - AUTH: noauth test_pyopenssl: "true" - SSL: ssl PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 - name: pyopenssl-rhel8-py3.10 tasks: - - name: .replica_set - - name: .7.0 + - name: .replica_set .auth .ssl + - name: .7.0 .auth .ssl display_name: PyOpenSSL RHEL8 py3.10 run_on: - rhel87-small batchtime: 10080 expansions: - AUTH: auth test_pyopenssl: "true" - SSL: ssl PYTHON_BINARY: /opt/python/3.10/bin/python3 - name: pyopenssl-rhel8-py3.11 tasks: - - name: .replica_set - - name: .7.0 + - name: .replica_set .auth .ssl + - name: .7.0 .auth .ssl display_name: PyOpenSSL RHEL8 py3.11 run_on: - rhel87-small batchtime: 10080 expansions: - AUTH: auth test_pyopenssl: "true" - SSL: ssl PYTHON_BINARY: /opt/python/3.11/bin/python3 - name: pyopenssl-rhel8-py3.12 tasks: - - name: .replica_set - - name: .7.0 + - name: .replica_set .auth .ssl + - name: .7.0 .auth .ssl display_name: PyOpenSSL RHEL8 py3.12 run_on: - rhel87-small batchtime: 10080 expansions: - AUTH: auth test_pyopenssl: "true" - SSL: ssl PYTHON_BINARY: /opt/python/3.12/bin/python3 - name: pyopenssl-win64-py3.13 tasks: - - name: .replica_set - - name: .7.0 + - name: .replica_set .auth .ssl + - name: .7.0 .auth .ssl display_name: PyOpenSSL Win64 py3.13 run_on: - windows-64-vsMulti-small batchtime: 10080 expansions: - AUTH: auth test_pyopenssl: "true" - SSL: ssl PYTHON_BINARY: C:/python/Python313/python.exe - name: pyopenssl-rhel8-pypy3.9 tasks: - - name: .replica_set - - name: .7.0 + - name: .replica_set .auth .ssl + - name: .7.0 .auth .ssl display_name: PyOpenSSL RHEL8 pypy3.9 run_on: - rhel87-small batchtime: 10080 expansions: - AUTH: auth test_pyopenssl: "true" - SSL: ssl PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 - name: pyopenssl-rhel8-pypy3.10 tasks: - - name: .replica_set - - name: .7.0 + - name: .replica_set .auth .ssl + - name: .7.0 .auth .ssl display_name: PyOpenSSL RHEL8 pypy3.10 run_on: - rhel87-small batchtime: 10080 expansions: - AUTH: auth test_pyopenssl: "true" - SSL: ssl PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 # Search index tests @@ -1386,794 +1208,196 @@ buildvariants: PYTHON_BINARY: /opt/python/3.9/bin/python3 # Server tests - - name: test-rhel8-py3.9-auth-ssl-cov + - name: test-rhel8-py3.9-cov tasks: - name: .standalone - name: .replica_set - name: .sharded_cluster - display_name: Test RHEL8 py3.9 Auth SSL cov + display_name: Test RHEL8 py3.9 cov run_on: - rhel87-small 
expansions: - AUTH: auth - SSL: ssl COVERAGE: coverage PYTHON_BINARY: /opt/python/3.9/bin/python3 tags: [coverage_tag] - - name: test-rhel8-py3.9-noauth-ssl-cov + - name: test-rhel8-py3.13-cov tasks: - name: .standalone - name: .replica_set - name: .sharded_cluster - display_name: Test RHEL8 py3.9 NoAuth SSL cov + display_name: Test RHEL8 py3.13 cov run_on: - rhel87-small expansions: - AUTH: noauth - SSL: ssl COVERAGE: coverage - PYTHON_BINARY: /opt/python/3.9/bin/python3 + PYTHON_BINARY: /opt/python/3.13/bin/python3 tags: [coverage_tag] - - name: test-rhel8-py3.9-noauth-nossl-cov + - name: test-rhel8-pypy3.10-cov tasks: - name: .standalone - name: .replica_set - name: .sharded_cluster - display_name: Test RHEL8 py3.9 NoAuth NoSSL cov + display_name: Test RHEL8 pypy3.10 cov run_on: - rhel87-small expansions: - AUTH: noauth - SSL: nossl COVERAGE: coverage - PYTHON_BINARY: /opt/python/3.9/bin/python3 + PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 tags: [coverage_tag] - - name: test-rhel8-py3.13-auth-ssl-cov + - name: test-rhel8-py3.10 tasks: - - name: .standalone - - name: .replica_set - - name: .sharded_cluster - display_name: Test RHEL8 py3.13 Auth SSL cov + - name: .sharded_cluster .auth .ssl + - name: .replica_set .noauth .ssl + - name: .standalone .noauth .nossl + display_name: Test RHEL8 py3.10 run_on: - rhel87-small expansions: - AUTH: auth - SSL: ssl COVERAGE: coverage - PYTHON_BINARY: /opt/python/3.13/bin/python3 - tags: [coverage_tag] - - name: test-rhel8-py3.13-noauth-ssl-cov + PYTHON_BINARY: /opt/python/3.10/bin/python3 + - name: test-rhel8-py3.11 tasks: - - name: .standalone - - name: .replica_set - - name: .sharded_cluster - display_name: Test RHEL8 py3.13 NoAuth SSL cov + - name: .sharded_cluster .auth .ssl + - name: .replica_set .noauth .ssl + - name: .standalone .noauth .nossl + display_name: Test RHEL8 py3.11 run_on: - rhel87-small expansions: - AUTH: noauth - SSL: ssl COVERAGE: coverage - PYTHON_BINARY: /opt/python/3.13/bin/python3 - tags: [coverage_tag] - - name: test-rhel8-py3.13-noauth-nossl-cov + PYTHON_BINARY: /opt/python/3.11/bin/python3 + - name: test-rhel8-py3.12 tasks: - - name: .standalone - - name: .replica_set - - name: .sharded_cluster - display_name: Test RHEL8 py3.13 NoAuth NoSSL cov + - name: .sharded_cluster .auth .ssl + - name: .replica_set .noauth .ssl + - name: .standalone .noauth .nossl + display_name: Test RHEL8 py3.12 run_on: - rhel87-small expansions: - AUTH: noauth - SSL: nossl COVERAGE: coverage - PYTHON_BINARY: /opt/python/3.13/bin/python3 - tags: [coverage_tag] - - name: test-rhel8-pypy3.10-auth-ssl-cov + PYTHON_BINARY: /opt/python/3.12/bin/python3 + - name: test-rhel8-pypy3.9 tasks: - - name: .standalone - - name: .replica_set - - name: .sharded_cluster - display_name: Test RHEL8 pypy3.10 Auth SSL cov + - name: .sharded_cluster .auth .ssl + - name: .replica_set .noauth .ssl + - name: .standalone .noauth .nossl + display_name: Test RHEL8 pypy3.9 run_on: - rhel87-small expansions: - AUTH: auth - SSL: ssl COVERAGE: coverage - PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 - tags: [coverage_tag] - - name: test-rhel8-pypy3.10-noauth-ssl-cov + PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 + - name: test-macos-py3.9 tasks: - - name: .standalone - - name: .replica_set - - name: .sharded_cluster - display_name: Test RHEL8 pypy3.10 NoAuth SSL cov - run_on: - - rhel87-small - expansions: - AUTH: noauth - SSL: ssl - COVERAGE: coverage - PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 - tags: [coverage_tag] - - name: 
test-rhel8-pypy3.10-noauth-nossl-cov - tasks: - - name: .standalone - - name: .replica_set - - name: .sharded_cluster - display_name: Test RHEL8 pypy3.10 NoAuth NoSSL cov - run_on: - - rhel87-small - expansions: - AUTH: noauth - SSL: nossl - COVERAGE: coverage - PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 - tags: [coverage_tag] - - name: test-rhel8-py3.10-auth-ssl - tasks: - - name: .standalone - display_name: Test RHEL8 py3.10 Auth SSL - run_on: - - rhel87-small - expansions: - AUTH: auth - SSL: ssl - PYTHON_BINARY: /opt/python/3.10/bin/python3 - - name: test-rhel8-py3.11-noauth-ssl - tasks: - - name: .replica_set - display_name: Test RHEL8 py3.11 NoAuth SSL - run_on: - - rhel87-small - expansions: - AUTH: noauth - SSL: ssl - PYTHON_BINARY: /opt/python/3.11/bin/python3 - - name: test-rhel8-py3.12-noauth-nossl - tasks: - - name: .sharded_cluster - display_name: Test RHEL8 py3.12 NoAuth NoSSL - run_on: - - rhel87-small - expansions: - AUTH: noauth - SSL: nossl - PYTHON_BINARY: /opt/python/3.12/bin/python3 - - name: test-rhel8-pypy3.9-auth-ssl - tasks: - - name: .standalone - display_name: Test RHEL8 pypy3.9 Auth SSL - run_on: - - rhel87-small - expansions: - AUTH: auth - SSL: ssl - PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 - - name: test-macos-py3.9-auth-ssl-sync - tasks: - - name: .standalone - display_name: Test macOS py3.9 Auth SSL Sync + - name: .sharded_cluster .auth .ssl + - name: .replica_set .noauth .ssl + - name: .standalone .noauth .nossl + display_name: Test macOS py3.9 run_on: - macos-14 expansions: - AUTH: auth - SSL: ssl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 - - name: test-macos-py3.9-noauth-ssl-sync - tasks: - - name: .standalone - display_name: Test macOS py3.9 NoAuth SSL Sync - run_on: - - macos-14 - expansions: - AUTH: noauth - SSL: ssl - TEST_SUITES: default SKIP_CSOT_TESTS: "true" PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 - - name: test-macos-py3.9-noauth-nossl-sync - tasks: - - name: .standalone - display_name: Test macOS py3.9 NoAuth NoSSL Sync - run_on: - - macos-14 - expansions: - AUTH: noauth - SSL: nossl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 - - name: test-macos-py3.9-auth-ssl-async - tasks: - - name: .standalone - display_name: Test macOS py3.9 Auth SSL Async - run_on: - - macos-14 - expansions: - AUTH: auth - SSL: ssl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 - - name: test-macos-py3.9-noauth-ssl-async - tasks: - - name: .standalone - display_name: Test macOS py3.9 NoAuth SSL Async - run_on: - - macos-14 - expansions: - AUTH: noauth - SSL: ssl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 - - name: test-macos-py3.9-noauth-nossl-async - tasks: - - name: .standalone - display_name: Test macOS py3.9 NoAuth NoSSL Async - run_on: - - macos-14 - expansions: - AUTH: noauth - SSL: nossl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 - - name: test-macos-py3.13-auth-ssl-sync - tasks: - - name: .sharded_cluster - display_name: Test macOS py3.13 Auth SSL Sync - run_on: - - macos-14 - expansions: - AUTH: auth - SSL: ssl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: 
/Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 - - name: test-macos-py3.13-noauth-ssl-sync - tasks: - - name: .sharded_cluster - display_name: Test macOS py3.13 NoAuth SSL Sync - run_on: - - macos-14 - expansions: - AUTH: noauth - SSL: ssl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 - - name: test-macos-py3.13-noauth-nossl-sync - tasks: - - name: .sharded_cluster - display_name: Test macOS py3.13 NoAuth NoSSL Sync - run_on: - - macos-14 - expansions: - AUTH: noauth - SSL: nossl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 - - name: test-macos-py3.13-auth-ssl-async - tasks: - - name: .sharded_cluster - display_name: Test macOS py3.13 Auth SSL Async - run_on: - - macos-14 - expansions: - AUTH: auth - SSL: ssl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 - - name: test-macos-py3.13-noauth-ssl-async + - name: test-macos-py3.13 tasks: - - name: .sharded_cluster - display_name: Test macOS py3.13 NoAuth SSL Async + - name: .sharded_cluster .auth .ssl + - name: .replica_set .noauth .ssl + - name: .standalone .noauth .nossl + display_name: Test macOS py3.13 run_on: - macos-14 expansions: - AUTH: noauth - SSL: ssl - TEST_SUITES: default_async SKIP_CSOT_TESTS: "true" PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 - - name: test-macos-py3.13-noauth-nossl-async - tasks: - - name: .sharded_cluster - display_name: Test macOS py3.13 NoAuth NoSSL Async - run_on: - - macos-14 - expansions: - AUTH: noauth - SSL: nossl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 - - name: test-macos-arm64-py3.9-auth-ssl-sync - tasks: - - name: .standalone .6.0 - - name: .standalone .7.0 - - name: .standalone .8.0 - - name: .standalone .rapid - - name: .standalone .latest - display_name: Test macOS Arm64 py3.9 Auth SSL Sync + - name: test-macos-arm64-py3.9 + tasks: + - name: .sharded_cluster .auth .ssl .6.0 + - name: .replica_set .noauth .ssl .6.0 + - name: .standalone .noauth .nossl .6.0 + - name: .sharded_cluster .auth .ssl .7.0 + - name: .replica_set .noauth .ssl .7.0 + - name: .standalone .noauth .nossl .7.0 + - name: .sharded_cluster .auth .ssl .8.0 + - name: .replica_set .noauth .ssl .8.0 + - name: .standalone .noauth .nossl .8.0 + - name: .sharded_cluster .auth .ssl .rapid + - name: .replica_set .noauth .ssl .rapid + - name: .standalone .noauth .nossl .rapid + - name: .sharded_cluster .auth .ssl .latest + - name: .replica_set .noauth .ssl .latest + - name: .standalone .noauth .nossl .latest + display_name: Test macOS Arm64 py3.9 run_on: - macos-14-arm64 expansions: - AUTH: auth - SSL: ssl - TEST_SUITES: default SKIP_CSOT_TESTS: "true" PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 - - name: test-macos-arm64-py3.9-noauth-ssl-sync - tasks: - - name: .standalone .6.0 - - name: .standalone .7.0 - - name: .standalone .8.0 - - name: .standalone .rapid - - name: .standalone .latest - display_name: Test macOS Arm64 py3.9 NoAuth SSL Sync + - name: test-macos-arm64-py3.13 + tasks: + - name: .sharded_cluster .auth .ssl .6.0 + - name: .replica_set .noauth .ssl .6.0 + - name: .standalone .noauth .nossl .6.0 + - name: .sharded_cluster .auth .ssl .7.0 + - name: .replica_set .noauth .ssl .7.0 + - name: .standalone 
.noauth .nossl .7.0 + - name: .sharded_cluster .auth .ssl .8.0 + - name: .replica_set .noauth .ssl .8.0 + - name: .standalone .noauth .nossl .8.0 + - name: .sharded_cluster .auth .ssl .rapid + - name: .replica_set .noauth .ssl .rapid + - name: .standalone .noauth .nossl .rapid + - name: .sharded_cluster .auth .ssl .latest + - name: .replica_set .noauth .ssl .latest + - name: .standalone .noauth .nossl .latest + display_name: Test macOS Arm64 py3.13 run_on: - macos-14-arm64 expansions: - AUTH: noauth - SSL: ssl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 - - name: test-macos-arm64-py3.9-noauth-nossl-sync - tasks: - - name: .standalone .6.0 - - name: .standalone .7.0 - - name: .standalone .8.0 - - name: .standalone .rapid - - name: .standalone .latest - display_name: Test macOS Arm64 py3.9 NoAuth NoSSL Sync - run_on: - - macos-14-arm64 - expansions: - AUTH: noauth - SSL: nossl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 - - name: test-macos-arm64-py3.9-auth-ssl-async - tasks: - - name: .standalone .6.0 - - name: .standalone .7.0 - - name: .standalone .8.0 - - name: .standalone .rapid - - name: .standalone .latest - display_name: Test macOS Arm64 py3.9 Auth SSL Async - run_on: - - macos-14-arm64 - expansions: - AUTH: auth - SSL: ssl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 - - name: test-macos-arm64-py3.9-noauth-ssl-async - tasks: - - name: .standalone .6.0 - - name: .standalone .7.0 - - name: .standalone .8.0 - - name: .standalone .rapid - - name: .standalone .latest - display_name: Test macOS Arm64 py3.9 NoAuth SSL Async - run_on: - - macos-14-arm64 - expansions: - AUTH: noauth - SSL: ssl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 - - name: test-macos-arm64-py3.9-noauth-nossl-async - tasks: - - name: .standalone .6.0 - - name: .standalone .7.0 - - name: .standalone .8.0 - - name: .standalone .rapid - - name: .standalone .latest - display_name: Test macOS Arm64 py3.9 NoAuth NoSSL Async - run_on: - - macos-14-arm64 - expansions: - AUTH: noauth - SSL: nossl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 - - name: test-macos-arm64-py3.13-auth-ssl-sync - tasks: - - name: .sharded_cluster .6.0 - - name: .sharded_cluster .7.0 - - name: .sharded_cluster .8.0 - - name: .sharded_cluster .rapid - - name: .sharded_cluster .latest - display_name: Test macOS Arm64 py3.13 Auth SSL Sync - run_on: - - macos-14-arm64 - expansions: - AUTH: auth - SSL: ssl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 - - name: test-macos-arm64-py3.13-noauth-ssl-sync - tasks: - - name: .sharded_cluster .6.0 - - name: .sharded_cluster .7.0 - - name: .sharded_cluster .8.0 - - name: .sharded_cluster .rapid - - name: .sharded_cluster .latest - display_name: Test macOS Arm64 py3.13 NoAuth SSL Sync - run_on: - - macos-14-arm64 - expansions: - AUTH: noauth - SSL: ssl - TEST_SUITES: default SKIP_CSOT_TESTS: "true" PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 - - name: test-macos-arm64-py3.13-noauth-nossl-sync + - name: test-win64-py3.9 tasks: - - name: .sharded_cluster .6.0 - - name: 
.sharded_cluster .7.0 - - name: .sharded_cluster .8.0 - - name: .sharded_cluster .rapid - - name: .sharded_cluster .latest - display_name: Test macOS Arm64 py3.13 NoAuth NoSSL Sync - run_on: - - macos-14-arm64 - expansions: - AUTH: noauth - SSL: nossl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 - - name: test-macos-arm64-py3.13-auth-ssl-async - tasks: - - name: .sharded_cluster .6.0 - - name: .sharded_cluster .7.0 - - name: .sharded_cluster .8.0 - - name: .sharded_cluster .rapid - - name: .sharded_cluster .latest - display_name: Test macOS Arm64 py3.13 Auth SSL Async - run_on: - - macos-14-arm64 - expansions: - AUTH: auth - SSL: ssl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 - - name: test-macos-arm64-py3.13-noauth-ssl-async - tasks: - - name: .sharded_cluster .6.0 - - name: .sharded_cluster .7.0 - - name: .sharded_cluster .8.0 - - name: .sharded_cluster .rapid - - name: .sharded_cluster .latest - display_name: Test macOS Arm64 py3.13 NoAuth SSL Async - run_on: - - macos-14-arm64 - expansions: - AUTH: noauth - SSL: ssl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 - - name: test-macos-arm64-py3.13-noauth-nossl-async - tasks: - - name: .sharded_cluster .6.0 - - name: .sharded_cluster .7.0 - - name: .sharded_cluster .8.0 - - name: .sharded_cluster .rapid - - name: .sharded_cluster .latest - display_name: Test macOS Arm64 py3.13 NoAuth NoSSL Async - run_on: - - macos-14-arm64 - expansions: - AUTH: noauth - SSL: nossl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 - - name: test-win64-py3.9-auth-ssl-sync - tasks: - - name: .standalone - display_name: Test Win64 py3.9 Auth SSL Sync - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: auth - SSL: ssl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/Python39/python.exe - - name: test-win64-py3.9-noauth-ssl-sync - tasks: - - name: .standalone - display_name: Test Win64 py3.9 NoAuth SSL Sync + - name: .sharded_cluster .auth .ssl + - name: .replica_set .noauth .ssl + - name: .standalone .noauth .nossl + display_name: Test Win64 py3.9 run_on: - windows-64-vsMulti-small expansions: - AUTH: noauth - SSL: ssl - TEST_SUITES: default SKIP_CSOT_TESTS: "true" PYTHON_BINARY: C:/python/Python39/python.exe - - name: test-win64-py3.9-noauth-nossl-sync + - name: test-win64-py3.13 tasks: - - name: .standalone - display_name: Test Win64 py3.9 NoAuth NoSSL Sync + - name: .sharded_cluster .auth .ssl + - name: .replica_set .noauth .ssl + - name: .standalone .noauth .nossl + display_name: Test Win64 py3.13 run_on: - windows-64-vsMulti-small expansions: - AUTH: noauth - SSL: nossl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/Python39/python.exe - - name: test-win64-py3.9-auth-ssl-async - tasks: - - name: .standalone - display_name: Test Win64 py3.9 Auth SSL Async - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: auth - SSL: ssl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/Python39/python.exe - - name: test-win64-py3.9-noauth-ssl-async - tasks: - - name: .standalone - display_name: Test Win64 py3.9 NoAuth SSL Async - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: noauth - SSL: ssl - TEST_SUITES: 
default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/Python39/python.exe - - name: test-win64-py3.9-noauth-nossl-async - tasks: - - name: .standalone - display_name: Test Win64 py3.9 NoAuth NoSSL Async - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: noauth - SSL: nossl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/Python39/python.exe - - name: test-win64-py3.13-auth-ssl-sync - tasks: - - name: .sharded_cluster - display_name: Test Win64 py3.13 Auth SSL Sync - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: auth - SSL: ssl - TEST_SUITES: default SKIP_CSOT_TESTS: "true" PYTHON_BINARY: C:/python/Python313/python.exe - - name: test-win64-py3.13-noauth-ssl-sync + - name: test-win32-py3.9 tasks: - - name: .sharded_cluster - display_name: Test Win64 py3.13 NoAuth SSL Sync + - name: .sharded_cluster .auth .ssl + - name: .replica_set .noauth .ssl + - name: .standalone .noauth .nossl + display_name: Test Win32 py3.9 run_on: - windows-64-vsMulti-small expansions: - AUTH: noauth - SSL: ssl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/Python313/python.exe - - name: test-win64-py3.13-noauth-nossl-sync - tasks: - - name: .sharded_cluster - display_name: Test Win64 py3.13 NoAuth NoSSL Sync - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: noauth - SSL: nossl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/Python313/python.exe - - name: test-win64-py3.13-auth-ssl-async - tasks: - - name: .sharded_cluster - display_name: Test Win64 py3.13 Auth SSL Async - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: auth - SSL: ssl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/Python313/python.exe - - name: test-win64-py3.13-noauth-ssl-async - tasks: - - name: .sharded_cluster - display_name: Test Win64 py3.13 NoAuth SSL Async - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: noauth - SSL: ssl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/Python313/python.exe - - name: test-win64-py3.13-noauth-nossl-async - tasks: - - name: .sharded_cluster - display_name: Test Win64 py3.13 NoAuth NoSSL Async - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: noauth - SSL: nossl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/Python313/python.exe - - name: test-win32-py3.9-auth-ssl-sync - tasks: - - name: .standalone - display_name: Test Win32 py3.9 Auth SSL Sync - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: auth - SSL: ssl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/32/Python39/python.exe - - name: test-win32-py3.9-noauth-ssl-sync - tasks: - - name: .standalone - display_name: Test Win32 py3.9 NoAuth SSL Sync - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: noauth - SSL: ssl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/32/Python39/python.exe - - name: test-win32-py3.9-noauth-nossl-sync - tasks: - - name: .standalone - display_name: Test Win32 py3.9 NoAuth NoSSL Sync - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: noauth - SSL: nossl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/32/Python39/python.exe - - name: test-win32-py3.9-auth-ssl-async - tasks: - - name: .standalone - display_name: Test Win32 py3.9 Auth SSL Async - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: auth - SSL: ssl - TEST_SUITES: default_async - 
SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/32/Python39/python.exe - - name: test-win32-py3.9-noauth-ssl-async - tasks: - - name: .standalone - display_name: Test Win32 py3.9 NoAuth SSL Async - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: noauth - SSL: ssl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/32/Python39/python.exe - - name: test-win32-py3.9-noauth-nossl-async - tasks: - - name: .standalone - display_name: Test Win32 py3.9 NoAuth NoSSL Async - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: noauth - SSL: nossl - TEST_SUITES: default_async SKIP_CSOT_TESTS: "true" PYTHON_BINARY: C:/python/32/Python39/python.exe - - name: test-win32-py3.13-auth-ssl-sync + - name: test-win32-py3.13 tasks: - - name: .sharded_cluster - display_name: Test Win32 py3.13 Auth SSL Sync + - name: .sharded_cluster .auth .ssl + - name: .replica_set .noauth .ssl + - name: .standalone .noauth .nossl + display_name: Test Win32 py3.13 run_on: - windows-64-vsMulti-small expansions: - AUTH: auth - SSL: ssl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/32/Python313/python.exe - - name: test-win32-py3.13-noauth-ssl-sync - tasks: - - name: .sharded_cluster - display_name: Test Win32 py3.13 NoAuth SSL Sync - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: noauth - SSL: ssl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/32/Python313/python.exe - - name: test-win32-py3.13-noauth-nossl-sync - tasks: - - name: .sharded_cluster - display_name: Test Win32 py3.13 NoAuth NoSSL Sync - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: noauth - SSL: nossl - TEST_SUITES: default - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/32/Python313/python.exe - - name: test-win32-py3.13-auth-ssl-async - tasks: - - name: .sharded_cluster - display_name: Test Win32 py3.13 Auth SSL Async - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: auth - SSL: ssl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/32/Python313/python.exe - - name: test-win32-py3.13-noauth-ssl-async - tasks: - - name: .sharded_cluster - display_name: Test Win32 py3.13 NoAuth SSL Async - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: noauth - SSL: ssl - TEST_SUITES: default_async - SKIP_CSOT_TESTS: "true" - PYTHON_BINARY: C:/python/32/Python313/python.exe - - name: test-win32-py3.13-noauth-nossl-async - tasks: - - name: .sharded_cluster - display_name: Test Win32 py3.13 NoAuth NoSSL Async - run_on: - - windows-64-vsMulti-small - expansions: - AUTH: noauth - SSL: nossl - TEST_SUITES: default_async SKIP_CSOT_TESTS: "true" PYTHON_BINARY: C:/python/32/Python313/python.exe diff --git a/.evergreen/scripts/generate_config.py b/.evergreen/scripts/generate_config.py index b65d9b62da..b8b8fa367c 100644 --- a/.evergreen/scripts/generate_config.py +++ b/.evergreen/scripts/generate_config.py @@ -17,8 +17,9 @@ from typing import Any from shrub.v3.evg_build_variant import BuildVariant +from shrub.v3.evg_command import FunctionCall from shrub.v3.evg_project import EvgProject -from shrub.v3.evg_task import EvgTaskRef +from shrub.v3.evg_task import EvgTask, EvgTaskRef from shrub.v3.shrub_service import ShrubService ############## @@ -34,6 +35,12 @@ AUTH_SSLS = [("auth", "ssl"), ("noauth", "ssl"), ("noauth", "nossl")] TOPOLOGIES = ["standalone", "replica_set", "sharded_cluster"] C_EXTS = ["with_ext", "without_ext"] +# By default test each of the topologies with a subset of auth/ssl. 
+SUB_TASKS = [ + ".sharded_cluster .auth .ssl", + ".replica_set .noauth .ssl", + ".standalone .noauth .nossl", +] SYNCS = ["sync", "async"] DISPLAY_LOOKUP = dict( ssl=dict(ssl="SSL", nossl="NoSSL"), @@ -175,10 +182,10 @@ def handle_c_ext(c_ext, expansions): expansions["NO_EXT"] = "1" -def create_yaml(tasks=None, variants=None): +def generate_yaml(tasks=None, variants=None): """Generate the yaml for a given set of tasks and variants.""" project = EvgProject(tasks=tasks, buildvariants=variants) - out = ShrubService.create_yaml(project) + out = ShrubService.generate_yaml(project) # Dedent by two spaces to match what we use in config.yml lines = [line[2:] for line in out.splitlines()] print("\n".join(lines)) # noqa: T201 @@ -233,9 +240,9 @@ def create_server_variants() -> list[BuildVariant]: # Run the full matrix on linux with min and max CPython, and latest pypy. host = "rhel8" - for python, (auth, ssl) in product([*MIN_MAX_PYTHON, PYPYS[-1]], AUTH_SSLS): + for python in [*MIN_MAX_PYTHON, PYPYS[-1]]: display_name = f"Test {host}" - expansions = dict(AUTH=auth, SSL=ssl, COVERAGE="coverage") + expansions = dict(COVERAGE="coverage") display_name = get_display_name("Test", host, python=python, **expansions) variant = create_variant( [f".{t}" for t in TOPOLOGIES], @@ -247,15 +254,12 @@ def create_server_variants() -> list[BuildVariant]: ) variants.append(variant) - # Test the rest of the pythons on linux. - for python, (auth, ssl), topology in zip_cycle( - CPYTHONS[1:-1] + PYPYS[:-1], AUTH_SSLS, TOPOLOGIES - ): + # Test the rest of the pythons. + for python in CPYTHONS[1:-1] + PYPYS[:-1]: display_name = f"Test {host}" - expansions = dict(AUTH=auth, SSL=ssl) - display_name = get_display_name("Test", host, python=python, **expansions) + display_name = get_display_name("Test", host, python=python) variant = create_variant( - [f".{topology}"], + SUB_TASKS, display_name, python=python, host=host, @@ -265,18 +269,14 @@ def create_server_variants() -> list[BuildVariant]: # Test a subset on each of the other platforms. for host in ("macos", "macos-arm64", "win64", "win32"): - for ( - python, - sync, - (auth, ssl), - ) in product(MIN_MAX_PYTHON, SYNCS, AUTH_SSLS): - test_suite = "default" if sync == "sync" else "default_async" - topology = TOPOLOGIES[0] if python == CPYTHONS[0] else TOPOLOGIES[-1] - tasks = [f".{topology}"] + for python in MIN_MAX_PYTHON: + tasks = SUB_TASKS # MacOS arm64 only works on server versions 6.0+ if host == "macos-arm64": - tasks = [f".{topology} .{version}" for version in get_versions_from("6.0")] - expansions = dict(AUTH=auth, SSL=ssl, TEST_SUITES=test_suite, SKIP_CSOT_TESTS="true") + tasks = [] + for version in get_versions_from("6.0"): + tasks.extend(f"{t} .{version}" for t in SUB_TASKS) + expansions = dict(SKIP_CSOT_TESTS="true") display_name = get_display_name("Test", host, python=python, **expansions) variant = create_variant( tasks, @@ -295,8 +295,8 @@ def create_encryption_variants() -> list[BuildVariant]: tags = ["encryption_tag"] batchtime = BATCHTIME_WEEK - def get_encryption_expansions(encryption, ssl="ssl"): - expansions = dict(AUTH="auth", SSL=ssl, test_encryption="true") + def get_encryption_expansions(encryption): + expansions = dict(test_encryption="true") if "crypt_shared" in encryption: expansions["test_crypt_shared"] = "true" if "PyOpenSSL" in encryption: @@ -305,13 +305,13 @@ def get_encryption_expansions(encryption, ssl="ssl"): host = "rhel8" - # Test against all server versions and topolgies for the three main python versions. 
+ # Test against all server versions for the three main python versions. encryptions = ["Encryption", "Encryption crypt_shared", "Encryption PyOpenSSL"] for encryption, python in product(encryptions, [*MIN_MAX_PYTHON, PYPYS[-1]]): expansions = get_encryption_expansions(encryption) display_name = get_display_name(encryption, host, python=python, **expansions) variant = create_variant( - [f".{t}" for t in TOPOLOGIES], + SUB_TASKS, display_name, python=python, host=host, @@ -322,13 +322,11 @@ def get_encryption_expansions(encryption, ssl="ssl"): variants.append(variant) # Test the rest of the pythons on linux for all server versions. - for encryption, python, ssl in zip_cycle( - encryptions, CPYTHONS[1:-1] + PYPYS[:-1], ["ssl", "nossl"] - ): - expansions = get_encryption_expansions(encryption, ssl) + for encryption, python, task in zip_cycle(encryptions, CPYTHONS[1:-1] + PYPYS[:-1], SUB_TASKS): + expansions = get_encryption_expansions(encryption) display_name = get_display_name(encryption, host, python=python, **expansions) variant = create_variant( - [".replica_set"], + [task], display_name, python=python, host=host, @@ -340,8 +338,7 @@ def get_encryption_expansions(encryption, ssl="ssl"): encryptions = ["Encryption", "Encryption crypt_shared"] task_names = [".latest .replica_set"] for host, encryption, python in product(["macos", "win64"], encryptions, MIN_MAX_PYTHON): - ssl = "ssl" if python == CPYTHONS[0] else "nossl" - expansions = get_encryption_expansions(encryption, ssl) + expansions = get_encryption_expansions(encryption) display_name = get_display_name(encryption, host, python=python, **expansions) variant = create_variant( task_names, @@ -386,7 +383,8 @@ def create_compression_variants(): # Compression tests - standalone versions of each server, across python versions, with and without c extensions. # PyPy interpreters are always tested without extensions. host = "rhel8" - task_names = dict(snappy=[".standalone"], zlib=[".standalone"], zstd=[".standalone !.4.0"]) + base_task = ".standalone .noauth .nossl" + task_names = dict(snappy=[base_task], zlib=[base_task], zstd=[f"{base_task} !.4.0"]) variants = [] for ind, (compressor, c_ext) in enumerate(product(["snappy", "zlib", "zstd"], C_EXTS)): expansions = dict(COMPRESSORS=compressor) @@ -445,24 +443,23 @@ def create_enterprise_auth_variants(): def create_pyopenssl_variants(): base_name = "PyOpenSSL" batchtime = BATCHTIME_WEEK - base_expansions = dict(test_pyopenssl="true", SSL="ssl") + expansions = dict(test_pyopenssl="true") variants = [] for python in ALL_PYTHONS: # Only test "noauth" with min python. 
auth = "noauth" if python == CPYTHONS[0] else "auth" + ssl = "nossl" if auth == "noauth" else "ssl" if python == CPYTHONS[0]: host = "macos" elif python == CPYTHONS[-1]: host = "win64" else: host = "rhel8" - expansions = dict(AUTH=auth) - expansions.update(base_expansions) display_name = get_display_name(base_name, host, python=python) variant = create_variant( - [".replica_set", ".7.0"], + [f".replica_set .{auth} .{ssl}", f".7.0 .{auth} .{ssl}"], display_name, python=python, host=host, @@ -482,12 +479,12 @@ def create_storage_engine_tests(): python = CPYTHONS[0] expansions = dict(STORAGE_ENGINE=engine.lower()) if engine == engines[0]: - tasks = [f".standalone .{v}" for v in ALL_VERSIONS] + tasks = [f".standalone .noauth .nossl .{v}" for v in ALL_VERSIONS] else: # MongoDB 4.2 drops support for MMAPv1 versions = get_versions_until("4.0") - tasks = [f".standalone .{v}" for v in versions] + [ - f".replica_set .{v}" for v in versions + tasks = [f".standalone .{v} .noauth .nossl" for v in versions] + [ + f".replica_set .{v} .noauth .nossl" for v in versions ] display_name = get_display_name(f"Storage {engine}", host, python=python) variant = create_variant( @@ -500,7 +497,7 @@ def create_storage_engine_tests(): def create_versioned_api_tests(): host = "rhel8" tags = ["versionedApi_tag"] - tasks = [f".standalone .{v}" for v in get_versions_from("5.0")] + tasks = [f".standalone .{v} .noauth .nossl" for v in get_versions_from("5.0")] variants = [] types = ["require v1", "accept v2"] @@ -531,7 +528,7 @@ def create_versioned_api_tests(): def create_green_framework_variants(): variants = [] - tasks = [".standalone"] + tasks = [".standalone .noauth .nossl"] host = "rhel8" for python, framework in product([CPYTHONS[0], CPYTHONS[-2]], ["eventlet", "gevent"]): expansions = dict(GREEN_FRAMEWORK=framework, AUTH="auth", SSL="ssl") @@ -547,7 +544,7 @@ def create_no_c_ext_variants(): variants = [] host = "rhel8" for python, topology in zip_cycle(CPYTHONS, TOPOLOGIES): - tasks = [f".{topology}"] + tasks = [f".{topology} .noauth .nossl"] expansions = dict() handle_c_ext(C_EXTS[0], expansions) display_name = get_display_name("No C Ext", host, python=python) @@ -717,32 +714,25 @@ def create_aws_auth_variants(): def create_alternative_hosts_variants(): - base_expansions = dict(SKIP_HATCH="true") + expansions = dict(SKIP_HATCH="true") batchtime = BATCHTIME_WEEK variants = [] host = "rhel7" - for auth, ssl in AUTH_SSLS: - expansions = base_expansions.copy() - expansions["AUTH"] = auth - expansions["SSL"] = ssl - variants.append( - create_variant( - [".5.0 .standalone"], - get_display_name("OpenSSL 1.0.2", "rhel7", python=CPYTHONS[0], **expansions), - host=host, - python=CPYTHONS[0], - batchtime=batchtime, - expansions=expansions, - ) + variants.append( + create_variant( + [".5.0 .standalone"], + get_display_name("OpenSSL 1.0.2", "rhel7", python=CPYTHONS[0], **expansions), + host=host, + python=CPYTHONS[0], + batchtime=batchtime, + expansions=expansions, ) + ) hosts = ["rhel92-fips", "rhel8-zseries-small", "rhel8-power-small", "rhel82-arm64-small"] host_names = ["RHEL9-FIPS", "RHEL8-zseries", "RHEL8-POWER8", "RHEL8-arm64"] - for (host, host_name), (auth, ssl) in product(zip(hosts, host_names), AUTH_SSLS): - expansions = base_expansions.copy() - expansions["AUTH"] = auth - expansions["SSL"] = ssl + for host, host_name in zip(hosts, host_names): variants.append( create_variant( [".6.0 .standalone"], @@ -755,6 +745,34 @@ def create_alternative_hosts_variants(): return variants +############## +# Tasks +############## + 
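The Tasks section that follows builds each server test task programmatically instead of hand-maintaining it in config.yml: every matrix cell (server version, topology, auth, ssl, sync mode) becomes one EvgTask whose name encodes the cell and whose tags mirror the matrix values, so build variants can select work with tag filters such as ".standalone .noauth .nossl". Below is a minimal sketch of that pattern; it reuses the EvgTask / FunctionCall / EvgProject / ShrubService calls that generate_config.py itself uses, but the shrub.py v3 import paths and the standalone build_server_task helper are assumptions for illustration, not part of this patch.

from shrub.v3.evg_command import FunctionCall
from shrub.v3.evg_project import EvgProject
from shrub.v3.evg_task import EvgTask
from shrub.v3.shrub_service import ShrubService


def build_server_task(version: str, topology: str, auth: str, ssl: str, sync: str) -> EvgTask:
    # Mirror the naming/tagging scheme used by create_server_tasks(): the task name
    # encodes the matrix cell, and the tags let variants select tasks with ".tag" filters.
    name = f"test-{version}-{topology}-{auth}-{ssl}-{sync}".lower()
    bootstrap = FunctionCall(
        func="bootstrap mongo-orchestration",
        vars=dict(
            VERSION=version,
            # mongo-orchestration calls a single mongod "server", not "standalone".
            TOPOLOGY=topology if topology != "standalone" else "server",
            AUTH=auth,
            SSL=ssl,
        ),
    )
    run = FunctionCall(
        func="run tests",
        vars=dict(
            AUTH=auth,
            SSL=ssl,
            SYNC=sync,
            TEST_SUITES="default" if sync == "sync" else "default_async",
        ),
    )
    return EvgTask(name=name, tags=[version, topology, auth, ssl, sync], commands=[bootstrap, run])


# Render a single task to YAML the same way write_tasks_to_file() renders the full set.
project = EvgProject(tasks=[build_server_task("8.0", "replica_set", "auth", "ssl", "sync")], buildvariants=None)
print(ShrubService.generate_yaml(project))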
+ +def create_server_tasks(): + tasks = [] + for topo, version, (auth, ssl), sync in product(TOPOLOGIES, ALL_VERSIONS, AUTH_SSLS, SYNCS): + name = f"test-{version}-{topo}-{auth}-{ssl}-{sync}".lower() + tags = [version, topo, auth, ssl, sync] + bootstrap_vars = dict( + VERSION=version, + TOPOLOGY=topo if topo != "standalone" else "server", + AUTH=auth, + SSL=ssl, + ) + bootstrap_func = FunctionCall(func="bootstrap mongo-orchestration", vars=bootstrap_vars) + test_vars = dict( + AUTH=auth, + SSL=ssl, + SYNC=sync, + TEST_SUITES="default" if sync == "sync" else "default_async", + ) + test_func = FunctionCall(func="run tests", vars=test_vars) + tasks.append(EvgTask(name=name, tags=tags, commands=[bootstrap_func, test_func])) + return tasks + + ################## # Generate Config ################## @@ -790,4 +808,35 @@ def write_variants_to_file(): fid.write(f"{line}\n") +def write_tasks_to_file(): + mod = sys.modules[__name__] + here = Path(__file__).absolute().parent + target = here.parent / "generated_configs" / "tasks.yml" + if target.exists(): + target.unlink() + with target.open("w") as fid: + fid.write("tasks:\n") + + for name, func in getmembers(mod, isfunction): + if not name.endswith("_tasks"): + continue + if not name.startswith("create_"): + raise ValueError("Task creators must start with create_") + title = name.replace("create_", "").replace("_tasks", "").replace("_", " ").capitalize() + project = EvgProject(tasks=func(), buildvariants=None) + out = ShrubService.generate_yaml(project).splitlines() + with target.open("a") as fid: + fid.write(f" # {title} tests\n") + for line in out[1:]: + fid.write(f"{line}\n") + fid.write("\n") + + # Remove extra trailing newline: + data = target.read_text().splitlines() + with target.open("w") as fid: + for line in data[:-1]: + fid.write(f"{line}\n") + + write_variants_to_file() +write_tasks_to_file() From 85ba541ed5a940bd3a7b7d967fe7510aac48a23d Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Fri, 25 Oct 2024 07:43:28 -0500 Subject: [PATCH 071/182] PYTHON-4905 Use shrub.py to generate load balancer tasks (#1968) --- .evergreen/config.yml | 9 - .evergreen/generated_configs/tasks.yml | 47 +++++ .evergreen/generated_configs/variants.yml | 225 ++++------------------ .evergreen/scripts/generate_config.py | 34 ++-- 4 files changed, 110 insertions(+), 205 deletions(-) diff --git a/.evergreen/config.yml b/.evergreen/config.yml index a1587a281d..fda6864317 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -1542,15 +1542,6 @@ tasks: - func: "run aws auth test with aws web identity credentials" - func: "run aws ECS auth test" - - name: load-balancer-test - commands: - - func: "bootstrap mongo-orchestration" - vars: - TOPOLOGY: "sharded_cluster" - LOAD_BALANCER: true - - func: "run load-balancer" - - func: "run tests" - - name: "oidc-auth-test" commands: - func: "run oidc auth test with test credentials" diff --git a/.evergreen/generated_configs/tasks.yml b/.evergreen/generated_configs/tasks.yml index fb3da4bb24..0f416ab595 100644 --- a/.evergreen/generated_configs/tasks.yml +++ b/.evergreen/generated_configs/tasks.yml @@ -1,4 +1,51 @@ tasks: + # Load balancer tests + - name: test-load-balancer-auth-ssl + commands: + - func: bootstrap mongo-orchestration + vars: + TOPOLOGY: sharded_cluster + AUTH: auth + SSL: ssl + LOAD_BALANCER: "true" + - func: run load-balancer + - func: run tests + vars: + AUTH: auth + SSL: ssl + test_loadbalancer: "true" + tags: [load-balancer, auth, ssl] + - name: test-load-balancer-noauth-ssl + commands: + - 
func: bootstrap mongo-orchestration + vars: + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: ssl + LOAD_BALANCER: "true" + - func: run load-balancer + - func: run tests + vars: + AUTH: noauth + SSL: ssl + test_loadbalancer: "true" + tags: [load-balancer, noauth, ssl] + - name: test-load-balancer-noauth-nossl + commands: + - func: bootstrap mongo-orchestration + vars: + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: nossl + LOAD_BALANCER: "true" + - func: run load-balancer + - func: run tests + vars: + AUTH: noauth + SSL: nossl + test_loadbalancer: "true" + tags: [load-balancer, noauth, nossl] + # Server tests - name: test-4.0-standalone-auth-ssl-sync commands: diff --git a/.evergreen/generated_configs/variants.yml b/.evergreen/generated_configs/variants.yml index 327becc249..f9a452b224 100644 --- a/.evergreen/generated_configs/variants.yml +++ b/.evergreen/generated_configs/variants.yml @@ -671,201 +671,56 @@ buildvariants: PYTHON_BINARY: /opt/python/3.12/bin/python3 # Load balancer tests - - name: load-balancer-rhel8-v6.0-py3.9-auth-ssl + - name: load-balancer-rhel8-v6.0-py3.9 tasks: - - name: load-balancer-test - display_name: Load Balancer RHEL8 v6.0 py3.9 Auth SSL + - name: .load-balancer + display_name: Load Balancer RHEL8 v6.0 py3.9 run_on: - rhel87-small batchtime: 10080 expansions: - VERSION: "6.0" - AUTH: auth - SSL: ssl - test_loadbalancer: "true" PYTHON_BINARY: /opt/python/3.9/bin/python3 - - name: load-balancer-rhel8-v6.0-py3.10-noauth-ssl - tasks: - - name: load-balancer-test - display_name: Load Balancer RHEL8 v6.0 py3.10 NoAuth SSL - run_on: - - rhel87-small - batchtime: 10080 - expansions: - VERSION: "6.0" - AUTH: noauth - SSL: ssl - test_loadbalancer: "true" - PYTHON_BINARY: /opt/python/3.10/bin/python3 - - name: load-balancer-rhel8-v6.0-py3.11-noauth-nossl - tasks: - - name: load-balancer-test - display_name: Load Balancer RHEL8 v6.0 py3.11 NoAuth NoSSL - run_on: - - rhel87-small - batchtime: 10080 - expansions: VERSION: "6.0" - AUTH: noauth - SSL: nossl - test_loadbalancer: "true" - PYTHON_BINARY: /opt/python/3.11/bin/python3 - - name: load-balancer-rhel8-v7.0-py3.12-auth-ssl + - name: load-balancer-rhel8-v7.0-py3.9 tasks: - - name: load-balancer-test - display_name: Load Balancer RHEL8 v7.0 py3.12 Auth SSL - run_on: - - rhel87-small - batchtime: 10080 - expansions: - VERSION: "7.0" - AUTH: auth - SSL: ssl - test_loadbalancer: "true" - PYTHON_BINARY: /opt/python/3.12/bin/python3 - - name: load-balancer-rhel8-v7.0-py3.13-noauth-ssl - tasks: - - name: load-balancer-test - display_name: Load Balancer RHEL8 v7.0 py3.13 NoAuth SSL + - name: .load-balancer + display_name: Load Balancer RHEL8 v7.0 py3.9 run_on: - rhel87-small batchtime: 10080 expansions: + PYTHON_BINARY: /opt/python/3.9/bin/python3 VERSION: "7.0" - AUTH: noauth - SSL: ssl - test_loadbalancer: "true" - PYTHON_BINARY: /opt/python/3.13/bin/python3 - - name: load-balancer-rhel8-v7.0-pypy3.9-noauth-nossl + - name: load-balancer-rhel8-v8.0-py3.9 tasks: - - name: load-balancer-test - display_name: Load Balancer RHEL8 v7.0 pypy3.9 NoAuth NoSSL + - name: .load-balancer + display_name: Load Balancer RHEL8 v8.0 py3.9 run_on: - rhel87-small batchtime: 10080 expansions: - VERSION: "7.0" - AUTH: noauth - SSL: nossl - test_loadbalancer: "true" - PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 - - name: load-balancer-rhel8-v8.0-pypy3.10-auth-ssl - tasks: - - name: load-balancer-test - display_name: Load Balancer RHEL8 v8.0 pypy3.10 Auth SSL - run_on: - - rhel87-small - batchtime: 10080 - expansions: - VERSION: "8.0" - AUTH: 
auth - SSL: ssl - test_loadbalancer: "true" - PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 - - name: load-balancer-rhel8-v8.0-py3.9-noauth-ssl - tasks: - - name: load-balancer-test - display_name: Load Balancer RHEL8 v8.0 py3.9 NoAuth SSL - run_on: - - rhel87-small - batchtime: 10080 - expansions: - VERSION: "8.0" - AUTH: noauth - SSL: ssl - test_loadbalancer: "true" PYTHON_BINARY: /opt/python/3.9/bin/python3 - - name: load-balancer-rhel8-v8.0-py3.10-noauth-nossl - tasks: - - name: load-balancer-test - display_name: Load Balancer RHEL8 v8.0 py3.10 NoAuth NoSSL - run_on: - - rhel87-small - batchtime: 10080 - expansions: VERSION: "8.0" - AUTH: noauth - SSL: nossl - test_loadbalancer: "true" - PYTHON_BINARY: /opt/python/3.10/bin/python3 - - name: load-balancer-rhel8-rapid-py3.11-auth-ssl - tasks: - - name: load-balancer-test - display_name: Load Balancer RHEL8 rapid py3.11 Auth SSL - run_on: - - rhel87-small - batchtime: 10080 - expansions: - VERSION: rapid - AUTH: auth - SSL: ssl - test_loadbalancer: "true" - PYTHON_BINARY: /opt/python/3.11/bin/python3 - - name: load-balancer-rhel8-rapid-py3.12-noauth-ssl - tasks: - - name: load-balancer-test - display_name: Load Balancer RHEL8 rapid py3.12 NoAuth SSL - run_on: - - rhel87-small - batchtime: 10080 - expansions: - VERSION: rapid - AUTH: noauth - SSL: ssl - test_loadbalancer: "true" - PYTHON_BINARY: /opt/python/3.12/bin/python3 - - name: load-balancer-rhel8-rapid-py3.13-noauth-nossl + - name: load-balancer-rhel8-rapid-py3.9 tasks: - - name: load-balancer-test - display_name: Load Balancer RHEL8 rapid py3.13 NoAuth NoSSL + - name: .load-balancer + display_name: Load Balancer RHEL8 rapid py3.9 run_on: - rhel87-small batchtime: 10080 expansions: + PYTHON_BINARY: /opt/python/3.9/bin/python3 VERSION: rapid - AUTH: noauth - SSL: nossl - test_loadbalancer: "true" - PYTHON_BINARY: /opt/python/3.13/bin/python3 - - name: load-balancer-rhel8-latest-pypy3.9-auth-ssl - tasks: - - name: load-balancer-test - display_name: Load Balancer RHEL8 latest pypy3.9 Auth SSL - run_on: - - rhel87-small - batchtime: 10080 - expansions: - VERSION: latest - AUTH: auth - SSL: ssl - test_loadbalancer: "true" - PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 - - name: load-balancer-rhel8-latest-pypy3.10-noauth-ssl + - name: load-balancer-rhel8-latest-py3.9 tasks: - - name: load-balancer-test - display_name: Load Balancer RHEL8 latest pypy3.10 NoAuth SSL + - name: .load-balancer + display_name: Load Balancer RHEL8 latest py3.9 run_on: - rhel87-small batchtime: 10080 expansions: - VERSION: latest - AUTH: noauth - SSL: ssl - test_loadbalancer: "true" - PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 - - name: load-balancer-rhel8-latest-py3.9-noauth-nossl - tasks: - - name: load-balancer-test - display_name: Load Balancer RHEL8 latest py3.9 NoAuth NoSSL - run_on: - - rhel87-small - batchtime: 10080 - expansions: - VERSION: latest - AUTH: noauth - SSL: nossl - test_loadbalancer: "true" PYTHON_BINARY: /opt/python/3.9/bin/python3 + VERSION: latest # Mockupdb tests - name: mockupdb-tests-rhel8-py3.9 @@ -951,10 +806,10 @@ buildvariants: PYTHON_BINARY: /opt/python/3.13/bin/python3 # Ocsp tests - - name: ocsp-test-rhel8-py3.9 + - name: ocsp-test-rhel8-v4.4-py3.9 tasks: - name: .ocsp - display_name: OCSP test RHEL8 py3.9 + display_name: OCSP test RHEL8 v4.4 py3.9 run_on: - rhel87-small batchtime: 20160 @@ -964,10 +819,10 @@ buildvariants: TOPOLOGY: server PYTHON_BINARY: /opt/python/3.9/bin/python3 VERSION: "4.4" - - name: ocsp-test-rhel8-py3.10 + - name: ocsp-test-rhel8-v5.0-py3.10 
tasks: - name: .ocsp - display_name: OCSP test RHEL8 py3.10 + display_name: OCSP test RHEL8 v5.0 py3.10 run_on: - rhel87-small batchtime: 20160 @@ -977,10 +832,10 @@ buildvariants: TOPOLOGY: server PYTHON_BINARY: /opt/python/3.10/bin/python3 VERSION: "5.0" - - name: ocsp-test-rhel8-py3.11 + - name: ocsp-test-rhel8-v6.0-py3.11 tasks: - name: .ocsp - display_name: OCSP test RHEL8 py3.11 + display_name: OCSP test RHEL8 v6.0 py3.11 run_on: - rhel87-small batchtime: 20160 @@ -990,10 +845,10 @@ buildvariants: TOPOLOGY: server PYTHON_BINARY: /opt/python/3.11/bin/python3 VERSION: "6.0" - - name: ocsp-test-rhel8-py3.12 + - name: ocsp-test-rhel8-v7.0-py3.12 tasks: - name: .ocsp - display_name: OCSP test RHEL8 py3.12 + display_name: OCSP test RHEL8 v7.0 py3.12 run_on: - rhel87-small batchtime: 20160 @@ -1003,10 +858,10 @@ buildvariants: TOPOLOGY: server PYTHON_BINARY: /opt/python/3.12/bin/python3 VERSION: "7.0" - - name: ocsp-test-rhel8-py3.13 + - name: ocsp-test-rhel8-v8.0-py3.13 tasks: - name: .ocsp - display_name: OCSP test RHEL8 py3.13 + display_name: OCSP test RHEL8 v8.0 py3.13 run_on: - rhel87-small batchtime: 20160 @@ -1016,10 +871,10 @@ buildvariants: TOPOLOGY: server PYTHON_BINARY: /opt/python/3.13/bin/python3 VERSION: "8.0" - - name: ocsp-test-rhel8-pypy3.9 + - name: ocsp-test-rhel8-rapid-pypy3.9 tasks: - name: .ocsp - display_name: OCSP test RHEL8 pypy3.9 + display_name: OCSP test RHEL8 rapid pypy3.9 run_on: - rhel87-small batchtime: 20160 @@ -1029,10 +884,10 @@ buildvariants: TOPOLOGY: server PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 VERSION: rapid - - name: ocsp-test-rhel8-pypy3.10 + - name: ocsp-test-rhel8-latest-pypy3.10 tasks: - name: .ocsp - display_name: OCSP test RHEL8 pypy3.10 + display_name: OCSP test RHEL8 latest pypy3.10 run_on: - rhel87-small batchtime: 20160 @@ -1042,10 +897,10 @@ buildvariants: TOPOLOGY: server PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 VERSION: latest - - name: ocsp-test-win64-py3.9 + - name: ocsp-test-win64-v4.4-py3.9 tasks: - name: .ocsp-rsa !.ocsp-staple - display_name: OCSP test Win64 py3.9 + display_name: OCSP test Win64 v4.4 py3.9 run_on: - windows-64-vsMulti-small batchtime: 20160 @@ -1055,10 +910,10 @@ buildvariants: TOPOLOGY: server PYTHON_BINARY: C:/python/Python39/python.exe VERSION: "4.4" - - name: ocsp-test-win64-py3.13 + - name: ocsp-test-win64-v8.0-py3.13 tasks: - name: .ocsp-rsa !.ocsp-staple - display_name: OCSP test Win64 py3.13 + display_name: OCSP test Win64 v8.0 py3.13 run_on: - windows-64-vsMulti-small batchtime: 20160 @@ -1068,10 +923,10 @@ buildvariants: TOPOLOGY: server PYTHON_BINARY: C:/python/Python313/python.exe VERSION: "8.0" - - name: ocsp-test-macos-py3.9 + - name: ocsp-test-macos-v4.4-py3.9 tasks: - name: .ocsp-rsa !.ocsp-staple - display_name: OCSP test macOS py3.9 + display_name: OCSP test macOS v4.4 py3.9 run_on: - macos-14 batchtime: 20160 @@ -1081,10 +936,10 @@ buildvariants: TOPOLOGY: server PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 VERSION: "4.4" - - name: ocsp-test-macos-py3.13 + - name: ocsp-test-macos-v8.0-py3.13 tasks: - name: .ocsp-rsa !.ocsp-staple - display_name: OCSP test macOS py3.13 + display_name: OCSP test macOS v8.0 py3.13 run_on: - macos-14 batchtime: 20160 diff --git a/.evergreen/scripts/generate_config.py b/.evergreen/scripts/generate_config.py index b8b8fa367c..eefd04b040 100644 --- a/.evergreen/scripts/generate_config.py +++ b/.evergreen/scripts/generate_config.py @@ -152,6 +152,7 @@ def get_display_name(base: str, host: str | None = None, **kwargs) -> str: 
if host is not None: display_name += f" {HOSTS[host].display_name}" version = kwargs.pop("VERSION", None) + version = version or kwargs.pop("version", None) if version: if version not in ["rapid", "latest"]: version = f"v{version}" @@ -354,25 +355,20 @@ def get_encryption_expansions(encryption): def create_load_balancer_variants(): - # Load balancer tests - run all supported versions for all combinations of auth and ssl and system python. + # Load balancer tests - run all supported server versions using the lowest supported python. host = "rhel8" - task_names = ["load-balancer-test"] batchtime = BATCHTIME_WEEK - expansions_base = dict(test_loadbalancer="true") versions = get_versions_from("6.0") variants = [] - pythons = CPYTHONS + PYPYS - for ind, (version, (auth, ssl)) in enumerate(product(versions, AUTH_SSLS)): - expansions = dict(VERSION=version, AUTH=auth, SSL=ssl) - expansions.update(expansions_base) - python = pythons[ind % len(pythons)] - display_name = get_display_name("Load Balancer", host, python=python, **expansions) + for version in versions: + python = CPYTHONS[0] + display_name = get_display_name("Load Balancer", host, python=python, version=version) variant = create_variant( - task_names, + [".load-balancer"], display_name, python=python, host=host, - expansions=expansions, + version=version, batchtime=batchtime, ) variants.append(variant) @@ -773,6 +769,22 @@ def create_server_tasks(): return tasks +def create_load_balancer_tasks(): + tasks = [] + for auth, ssl in AUTH_SSLS: + name = f"test-load-balancer-{auth}-{ssl}".lower() + tags = ["load-balancer", auth, ssl] + bootstrap_vars = dict(TOPOLOGY="sharded_cluster", AUTH=auth, SSL=ssl, LOAD_BALANCER="true") + bootstrap_func = FunctionCall(func="bootstrap mongo-orchestration", vars=bootstrap_vars) + balancer_func = FunctionCall(func="run load-balancer") + test_vars = dict(AUTH=auth, SSL=ssl, test_loadbalancer="true") + test_func = FunctionCall(func="run tests", vars=test_vars) + tasks.append( + EvgTask(name=name, tags=tags, commands=[bootstrap_func, balancer_func, test_func]) + ) + return tasks + + ################## # Generate Config ################## From 97ac3ebee2e1d97dc4da1687b5277c16681dc3f0 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Fri, 25 Oct 2024 09:49:37 -0500 Subject: [PATCH 072/182] PYTHON-4738 Skip encryption fork test (#1972) --- test/asynchronous/test_encryption.py | 2 +- test/test_encryption.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/test/asynchronous/test_encryption.py b/test/asynchronous/test_encryption.py index 88b005c4b3..40f1acd32d 100644 --- a/test/asynchronous/test_encryption.py +++ b/test/asynchronous/test_encryption.py @@ -380,9 +380,9 @@ async def test_use_after_close(self): is_greenthread_patched(), "gevent and eventlet do not support POSIX-style forking.", ) - @unittest.skipIf("PyPy" in sys.version, "PYTHON-4738 fails often on PyPy") @async_client_context.require_sync async def test_fork(self): + self.skipTest("Test is flaky, PYTHON-4738") opts = AutoEncryptionOpts(KMS_PROVIDERS, "keyvault.datakeys") client = await self.async_rs_or_single_client(auto_encryption_opts=opts) diff --git a/test/test_encryption.py b/test/test_encryption.py index 13a69ca9ad..373981b1d2 100644 --- a/test/test_encryption.py +++ b/test/test_encryption.py @@ -380,9 +380,9 @@ def test_use_after_close(self): is_greenthread_patched(), "gevent and eventlet do not support POSIX-style forking.", ) - @unittest.skipIf("PyPy" in sys.version, "PYTHON-4738 fails often on PyPy") 
@client_context.require_sync def test_fork(self): + self.skipTest("Test is flaky, PYTHON-4738") opts = AutoEncryptionOpts(KMS_PROVIDERS, "keyvault.datakeys") client = self.rs_or_single_client(auto_encryption_opts=opts) From 4aeca321c5f8607ee8b528eebedf4a3badf7f967 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 28 Oct 2024 04:47:07 -0500 Subject: [PATCH 073/182] Bump mypy from 1.12.1 to 1.13.0 (#1974) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/typing.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/typing.txt b/requirements/typing.txt index 7ccc122f53..db0825c2b1 100644 --- a/requirements/typing.txt +++ b/requirements/typing.txt @@ -1,4 +1,4 @@ -mypy==1.12.1 +mypy==1.13.0 pyright==1.1.384 typing_extensions -r ./encryption.txt From 72863862c921cbb0697e3c1ca61ed4de17f012c5 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Mon, 28 Oct 2024 07:49:11 -0500 Subject: [PATCH 074/182] PYTHON-4910 Add server tasks that use both sync and async (#1975) --- .evergreen/generated_configs/tasks.yml | 1440 +++++++++++++++++++++ .evergreen/generated_configs/variants.yml | 292 ++--- .evergreen/scripts/generate_config.py | 45 +- 3 files changed, 1611 insertions(+), 166 deletions(-) diff --git a/.evergreen/generated_configs/tasks.yml b/.evergreen/generated_configs/tasks.yml index 0f416ab595..c666c6901a 100644 --- a/.evergreen/generated_configs/tasks.yml +++ b/.evergreen/generated_configs/tasks.yml @@ -87,6 +87,26 @@ tasks: - auth - ssl - async + - name: test-4.0-standalone-auth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.0" + TOPOLOGY: server + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "4.0" + - standalone + - auth + - ssl + - sync_async - name: test-4.0-standalone-noauth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -127,6 +147,26 @@ tasks: - noauth - ssl - async + - name: test-4.0-standalone-noauth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.0" + TOPOLOGY: server + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "4.0" + - standalone + - noauth + - ssl + - sync_async - name: test-4.0-standalone-noauth-nossl-sync commands: - func: bootstrap mongo-orchestration @@ -167,6 +207,26 @@ tasks: - noauth - nossl - async + - name: test-4.0-standalone-noauth-nossl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.0" + TOPOLOGY: server + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "4.0" + - standalone + - noauth + - nossl + - sync_async - name: test-4.4-standalone-auth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -207,6 +267,26 @@ tasks: - auth - ssl - async + - name: test-4.4-standalone-auth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.4" + TOPOLOGY: server + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "4.4" + - standalone + - auth + - ssl + - sync_async - name: test-4.4-standalone-noauth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -247,6 +327,26 @@ tasks: - noauth - ssl - async + - name: 
test-4.4-standalone-noauth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.4" + TOPOLOGY: server + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "4.4" + - standalone + - noauth + - ssl + - sync_async - name: test-4.4-standalone-noauth-nossl-sync commands: - func: bootstrap mongo-orchestration @@ -287,6 +387,26 @@ tasks: - noauth - nossl - async + - name: test-4.4-standalone-noauth-nossl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.4" + TOPOLOGY: server + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "4.4" + - standalone + - noauth + - nossl + - sync_async - name: test-5.0-standalone-auth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -327,6 +447,26 @@ tasks: - auth - ssl - async + - name: test-5.0-standalone-auth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "5.0" + TOPOLOGY: server + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "5.0" + - standalone + - auth + - ssl + - sync_async - name: test-5.0-standalone-noauth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -367,6 +507,26 @@ tasks: - noauth - ssl - async + - name: test-5.0-standalone-noauth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "5.0" + TOPOLOGY: server + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "5.0" + - standalone + - noauth + - ssl + - sync_async - name: test-5.0-standalone-noauth-nossl-sync commands: - func: bootstrap mongo-orchestration @@ -407,6 +567,26 @@ tasks: - noauth - nossl - async + - name: test-5.0-standalone-noauth-nossl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "5.0" + TOPOLOGY: server + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "5.0" + - standalone + - noauth + - nossl + - sync_async - name: test-6.0-standalone-auth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -447,6 +627,26 @@ tasks: - auth - ssl - async + - name: test-6.0-standalone-auth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "6.0" + TOPOLOGY: server + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "6.0" + - standalone + - auth + - ssl + - sync_async - name: test-6.0-standalone-noauth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -487,6 +687,26 @@ tasks: - noauth - ssl - async + - name: test-6.0-standalone-noauth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "6.0" + TOPOLOGY: server + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "6.0" + - standalone + - noauth + - ssl + - sync_async - name: test-6.0-standalone-noauth-nossl-sync commands: - func: bootstrap mongo-orchestration @@ -527,6 +747,26 @@ tasks: - noauth - nossl - async + - name: test-6.0-standalone-noauth-nossl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "6.0" + TOPOLOGY: server + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + 
SYNC: sync_async + TEST_SUITES: "" + tags: + - "6.0" + - standalone + - noauth + - nossl + - sync_async - name: test-7.0-standalone-auth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -567,6 +807,26 @@ tasks: - auth - ssl - async + - name: test-7.0-standalone-auth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "7.0" + TOPOLOGY: server + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "7.0" + - standalone + - auth + - ssl + - sync_async - name: test-7.0-standalone-noauth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -607,6 +867,26 @@ tasks: - noauth - ssl - async + - name: test-7.0-standalone-noauth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "7.0" + TOPOLOGY: server + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "7.0" + - standalone + - noauth + - ssl + - sync_async - name: test-7.0-standalone-noauth-nossl-sync commands: - func: bootstrap mongo-orchestration @@ -647,6 +927,26 @@ tasks: - noauth - nossl - async + - name: test-7.0-standalone-noauth-nossl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "7.0" + TOPOLOGY: server + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "7.0" + - standalone + - noauth + - nossl + - sync_async - name: test-8.0-standalone-auth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -687,6 +987,26 @@ tasks: - auth - ssl - async + - name: test-8.0-standalone-auth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "8.0" + TOPOLOGY: server + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "8.0" + - standalone + - auth + - ssl + - sync_async - name: test-8.0-standalone-noauth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -727,6 +1047,26 @@ tasks: - noauth - ssl - async + - name: test-8.0-standalone-noauth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "8.0" + TOPOLOGY: server + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "8.0" + - standalone + - noauth + - ssl + - sync_async - name: test-8.0-standalone-noauth-nossl-sync commands: - func: bootstrap mongo-orchestration @@ -767,6 +1107,26 @@ tasks: - noauth - nossl - async + - name: test-8.0-standalone-noauth-nossl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "8.0" + TOPOLOGY: server + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "8.0" + - standalone + - noauth + - nossl + - sync_async - name: test-rapid-standalone-auth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -807,6 +1167,26 @@ tasks: - auth - ssl - async + - name: test-rapid-standalone-auth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: rapid + TOPOLOGY: server + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - rapid + - standalone + - auth + - ssl + - sync_async - name: test-rapid-standalone-noauth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -847,6 +1227,26 @@ tasks: - 
noauth - ssl - async + - name: test-rapid-standalone-noauth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: rapid + TOPOLOGY: server + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - rapid + - standalone + - noauth + - ssl + - sync_async - name: test-rapid-standalone-noauth-nossl-sync commands: - func: bootstrap mongo-orchestration @@ -887,6 +1287,26 @@ tasks: - noauth - nossl - async + - name: test-rapid-standalone-noauth-nossl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: rapid + TOPOLOGY: server + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync_async + TEST_SUITES: "" + tags: + - rapid + - standalone + - noauth + - nossl + - sync_async - name: test-latest-standalone-auth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -927,6 +1347,26 @@ tasks: - auth - ssl - async + - name: test-latest-standalone-auth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: latest + TOPOLOGY: server + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - latest + - standalone + - auth + - ssl + - sync_async - name: test-latest-standalone-noauth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -967,6 +1407,26 @@ tasks: - noauth - ssl - async + - name: test-latest-standalone-noauth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: latest + TOPOLOGY: server + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - latest + - standalone + - noauth + - ssl + - sync_async - name: test-latest-standalone-noauth-nossl-sync commands: - func: bootstrap mongo-orchestration @@ -1007,6 +1467,26 @@ tasks: - noauth - nossl - async + - name: test-latest-standalone-noauth-nossl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: latest + TOPOLOGY: server + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync_async + TEST_SUITES: "" + tags: + - latest + - standalone + - noauth + - nossl + - sync_async - name: test-4.0-replica_set-auth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -1047,6 +1527,26 @@ tasks: - auth - ssl - async + - name: test-4.0-replica_set-auth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.0" + TOPOLOGY: replica_set + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "4.0" + - replica_set + - auth + - ssl + - sync_async - name: test-4.0-replica_set-noauth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -1087,6 +1587,26 @@ tasks: - noauth - ssl - async + - name: test-4.0-replica_set-noauth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.0" + TOPOLOGY: replica_set + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "4.0" + - replica_set + - noauth + - ssl + - sync_async - name: test-4.0-replica_set-noauth-nossl-sync commands: - func: bootstrap mongo-orchestration @@ -1127,6 +1647,26 @@ tasks: - noauth - nossl - async + - name: test-4.0-replica_set-noauth-nossl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.0" + TOPOLOGY: 
replica_set + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "4.0" + - replica_set + - noauth + - nossl + - sync_async - name: test-4.4-replica_set-auth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -1167,6 +1707,26 @@ tasks: - auth - ssl - async + - name: test-4.4-replica_set-auth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.4" + TOPOLOGY: replica_set + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "4.4" + - replica_set + - auth + - ssl + - sync_async - name: test-4.4-replica_set-noauth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -1207,6 +1767,26 @@ tasks: - noauth - ssl - async + - name: test-4.4-replica_set-noauth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.4" + TOPOLOGY: replica_set + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "4.4" + - replica_set + - noauth + - ssl + - sync_async - name: test-4.4-replica_set-noauth-nossl-sync commands: - func: bootstrap mongo-orchestration @@ -1247,6 +1827,26 @@ tasks: - noauth - nossl - async + - name: test-4.4-replica_set-noauth-nossl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.4" + TOPOLOGY: replica_set + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "4.4" + - replica_set + - noauth + - nossl + - sync_async - name: test-5.0-replica_set-auth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -1287,6 +1887,26 @@ tasks: - auth - ssl - async + - name: test-5.0-replica_set-auth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "5.0" + TOPOLOGY: replica_set + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "5.0" + - replica_set + - auth + - ssl + - sync_async - name: test-5.0-replica_set-noauth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -1327,6 +1947,26 @@ tasks: - noauth - ssl - async + - name: test-5.0-replica_set-noauth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "5.0" + TOPOLOGY: replica_set + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "5.0" + - replica_set + - noauth + - ssl + - sync_async - name: test-5.0-replica_set-noauth-nossl-sync commands: - func: bootstrap mongo-orchestration @@ -1367,6 +2007,26 @@ tasks: - noauth - nossl - async + - name: test-5.0-replica_set-noauth-nossl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "5.0" + TOPOLOGY: replica_set + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "5.0" + - replica_set + - noauth + - nossl + - sync_async - name: test-6.0-replica_set-auth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -1407,6 +2067,26 @@ tasks: - auth - ssl - async + - name: test-6.0-replica_set-auth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "6.0" + TOPOLOGY: replica_set + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "6.0" + - replica_set 
+ - auth + - ssl + - sync_async - name: test-6.0-replica_set-noauth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -1447,6 +2127,26 @@ tasks: - noauth - ssl - async + - name: test-6.0-replica_set-noauth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "6.0" + TOPOLOGY: replica_set + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "6.0" + - replica_set + - noauth + - ssl + - sync_async - name: test-6.0-replica_set-noauth-nossl-sync commands: - func: bootstrap mongo-orchestration @@ -1487,6 +2187,26 @@ tasks: - noauth - nossl - async + - name: test-6.0-replica_set-noauth-nossl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "6.0" + TOPOLOGY: replica_set + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "6.0" + - replica_set + - noauth + - nossl + - sync_async - name: test-7.0-replica_set-auth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -1527,6 +2247,26 @@ tasks: - auth - ssl - async + - name: test-7.0-replica_set-auth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "7.0" + TOPOLOGY: replica_set + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "7.0" + - replica_set + - auth + - ssl + - sync_async - name: test-7.0-replica_set-noauth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -1567,6 +2307,26 @@ tasks: - noauth - ssl - async + - name: test-7.0-replica_set-noauth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "7.0" + TOPOLOGY: replica_set + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "7.0" + - replica_set + - noauth + - ssl + - sync_async - name: test-7.0-replica_set-noauth-nossl-sync commands: - func: bootstrap mongo-orchestration @@ -1607,6 +2367,26 @@ tasks: - noauth - nossl - async + - name: test-7.0-replica_set-noauth-nossl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "7.0" + TOPOLOGY: replica_set + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "7.0" + - replica_set + - noauth + - nossl + - sync_async - name: test-8.0-replica_set-auth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -1647,6 +2427,26 @@ tasks: - auth - ssl - async + - name: test-8.0-replica_set-auth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "8.0" + TOPOLOGY: replica_set + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "8.0" + - replica_set + - auth + - ssl + - sync_async - name: test-8.0-replica_set-noauth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -1687,6 +2487,26 @@ tasks: - noauth - ssl - async + - name: test-8.0-replica_set-noauth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "8.0" + TOPOLOGY: replica_set + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "8.0" + - replica_set + - noauth + - ssl + - sync_async - name: test-8.0-replica_set-noauth-nossl-sync commands: - func: bootstrap mongo-orchestration @@ -1727,6 +2547,26 @@ tasks: - 
noauth - nossl - async + - name: test-8.0-replica_set-noauth-nossl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "8.0" + TOPOLOGY: replica_set + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "8.0" + - replica_set + - noauth + - nossl + - sync_async - name: test-rapid-replica_set-auth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -1767,6 +2607,26 @@ tasks: - auth - ssl - async + - name: test-rapid-replica_set-auth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: rapid + TOPOLOGY: replica_set + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - rapid + - replica_set + - auth + - ssl + - sync_async - name: test-rapid-replica_set-noauth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -1807,6 +2667,26 @@ tasks: - noauth - ssl - async + - name: test-rapid-replica_set-noauth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: rapid + TOPOLOGY: replica_set + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - rapid + - replica_set + - noauth + - ssl + - sync_async - name: test-rapid-replica_set-noauth-nossl-sync commands: - func: bootstrap mongo-orchestration @@ -1847,6 +2727,26 @@ tasks: - noauth - nossl - async + - name: test-rapid-replica_set-noauth-nossl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: rapid + TOPOLOGY: replica_set + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync_async + TEST_SUITES: "" + tags: + - rapid + - replica_set + - noauth + - nossl + - sync_async - name: test-latest-replica_set-auth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -1887,6 +2787,26 @@ tasks: - auth - ssl - async + - name: test-latest-replica_set-auth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: latest + TOPOLOGY: replica_set + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - latest + - replica_set + - auth + - ssl + - sync_async - name: test-latest-replica_set-noauth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -1927,6 +2847,26 @@ tasks: - noauth - ssl - async + - name: test-latest-replica_set-noauth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: latest + TOPOLOGY: replica_set + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - latest + - replica_set + - noauth + - ssl + - sync_async - name: test-latest-replica_set-noauth-nossl-sync commands: - func: bootstrap mongo-orchestration @@ -1967,6 +2907,26 @@ tasks: - noauth - nossl - async + - name: test-latest-replica_set-noauth-nossl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: latest + TOPOLOGY: replica_set + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync_async + TEST_SUITES: "" + tags: + - latest + - replica_set + - noauth + - nossl + - sync_async - name: test-4.0-sharded_cluster-auth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -2007,6 +2967,26 @@ tasks: - auth - ssl - async + - name: test-4.0-sharded_cluster-auth-ssl-sync_async + commands: + - func: bootstrap 
mongo-orchestration + vars: + VERSION: "4.0" + TOPOLOGY: sharded_cluster + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "4.0" + - sharded_cluster + - auth + - ssl + - sync_async - name: test-4.0-sharded_cluster-noauth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -2047,6 +3027,26 @@ tasks: - noauth - ssl - async + - name: test-4.0-sharded_cluster-noauth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.0" + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "4.0" + - sharded_cluster + - noauth + - ssl + - sync_async - name: test-4.0-sharded_cluster-noauth-nossl-sync commands: - func: bootstrap mongo-orchestration @@ -2087,6 +3087,26 @@ tasks: - noauth - nossl - async + - name: test-4.0-sharded_cluster-noauth-nossl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.0" + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "4.0" + - sharded_cluster + - noauth + - nossl + - sync_async - name: test-4.4-sharded_cluster-auth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -2127,6 +3147,26 @@ tasks: - auth - ssl - async + - name: test-4.4-sharded_cluster-auth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.4" + TOPOLOGY: sharded_cluster + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "4.4" + - sharded_cluster + - auth + - ssl + - sync_async - name: test-4.4-sharded_cluster-noauth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -2167,6 +3207,26 @@ tasks: - noauth - ssl - async + - name: test-4.4-sharded_cluster-noauth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.4" + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "4.4" + - sharded_cluster + - noauth + - ssl + - sync_async - name: test-4.4-sharded_cluster-noauth-nossl-sync commands: - func: bootstrap mongo-orchestration @@ -2207,6 +3267,26 @@ tasks: - noauth - nossl - async + - name: test-4.4-sharded_cluster-noauth-nossl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "4.4" + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "4.4" + - sharded_cluster + - noauth + - nossl + - sync_async - name: test-5.0-sharded_cluster-auth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -2247,6 +3327,26 @@ tasks: - auth - ssl - async + - name: test-5.0-sharded_cluster-auth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "5.0" + TOPOLOGY: sharded_cluster + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "5.0" + - sharded_cluster + - auth + - ssl + - sync_async - name: test-5.0-sharded_cluster-noauth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -2287,6 +3387,26 @@ tasks: - noauth - ssl - async + - name: test-5.0-sharded_cluster-noauth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "5.0" + 
TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "5.0" + - sharded_cluster + - noauth + - ssl + - sync_async - name: test-5.0-sharded_cluster-noauth-nossl-sync commands: - func: bootstrap mongo-orchestration @@ -2327,6 +3447,26 @@ tasks: - noauth - nossl - async + - name: test-5.0-sharded_cluster-noauth-nossl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "5.0" + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "5.0" + - sharded_cluster + - noauth + - nossl + - sync_async - name: test-6.0-sharded_cluster-auth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -2367,6 +3507,26 @@ tasks: - auth - ssl - async + - name: test-6.0-sharded_cluster-auth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "6.0" + TOPOLOGY: sharded_cluster + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "6.0" + - sharded_cluster + - auth + - ssl + - sync_async - name: test-6.0-sharded_cluster-noauth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -2407,6 +3567,26 @@ tasks: - noauth - ssl - async + - name: test-6.0-sharded_cluster-noauth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "6.0" + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "6.0" + - sharded_cluster + - noauth + - ssl + - sync_async - name: test-6.0-sharded_cluster-noauth-nossl-sync commands: - func: bootstrap mongo-orchestration @@ -2447,6 +3627,26 @@ tasks: - noauth - nossl - async + - name: test-6.0-sharded_cluster-noauth-nossl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "6.0" + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "6.0" + - sharded_cluster + - noauth + - nossl + - sync_async - name: test-7.0-sharded_cluster-auth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -2487,6 +3687,26 @@ tasks: - auth - ssl - async + - name: test-7.0-sharded_cluster-auth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "7.0" + TOPOLOGY: sharded_cluster + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "7.0" + - sharded_cluster + - auth + - ssl + - sync_async - name: test-7.0-sharded_cluster-noauth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -2527,6 +3747,26 @@ tasks: - noauth - ssl - async + - name: test-7.0-sharded_cluster-noauth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "7.0" + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "7.0" + - sharded_cluster + - noauth + - ssl + - sync_async - name: test-7.0-sharded_cluster-noauth-nossl-sync commands: - func: bootstrap mongo-orchestration @@ -2567,6 +3807,26 @@ tasks: - noauth - nossl - async + - name: test-7.0-sharded_cluster-noauth-nossl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "7.0" + TOPOLOGY: sharded_cluster + AUTH: noauth 
+ SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "7.0" + - sharded_cluster + - noauth + - nossl + - sync_async - name: test-8.0-sharded_cluster-auth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -2607,6 +3867,26 @@ tasks: - auth - ssl - async + - name: test-8.0-sharded_cluster-auth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "8.0" + TOPOLOGY: sharded_cluster + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "8.0" + - sharded_cluster + - auth + - ssl + - sync_async - name: test-8.0-sharded_cluster-noauth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -2647,6 +3927,26 @@ tasks: - noauth - ssl - async + - name: test-8.0-sharded_cluster-noauth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "8.0" + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "8.0" + - sharded_cluster + - noauth + - ssl + - sync_async - name: test-8.0-sharded_cluster-noauth-nossl-sync commands: - func: bootstrap mongo-orchestration @@ -2687,6 +3987,26 @@ tasks: - noauth - nossl - async + - name: test-8.0-sharded_cluster-noauth-nossl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: "8.0" + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync_async + TEST_SUITES: "" + tags: + - "8.0" + - sharded_cluster + - noauth + - nossl + - sync_async - name: test-rapid-sharded_cluster-auth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -2727,6 +4047,26 @@ tasks: - auth - ssl - async + - name: test-rapid-sharded_cluster-auth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: rapid + TOPOLOGY: sharded_cluster + AUTH: auth + SSL: ssl + - func: run tests + vars: + AUTH: auth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - rapid + - sharded_cluster + - auth + - ssl + - sync_async - name: test-rapid-sharded_cluster-noauth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -2767,6 +4107,26 @@ tasks: - noauth - ssl - async + - name: test-rapid-sharded_cluster-noauth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: rapid + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - rapid + - sharded_cluster + - noauth + - ssl + - sync_async - name: test-rapid-sharded_cluster-noauth-nossl-sync commands: - func: bootstrap mongo-orchestration @@ -2807,6 +4167,26 @@ tasks: - noauth - nossl - async + - name: test-rapid-sharded_cluster-noauth-nossl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: rapid + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync_async + TEST_SUITES: "" + tags: + - rapid + - sharded_cluster + - noauth + - nossl + - sync_async - name: test-latest-sharded_cluster-auth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -2847,6 +4227,26 @@ tasks: - auth - ssl - async + - name: test-latest-sharded_cluster-auth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: latest + TOPOLOGY: sharded_cluster + AUTH: auth + SSL: ssl + - func: run tests 
+ vars: + AUTH: auth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - latest + - sharded_cluster + - auth + - ssl + - sync_async - name: test-latest-sharded_cluster-noauth-ssl-sync commands: - func: bootstrap mongo-orchestration @@ -2887,6 +4287,26 @@ tasks: - noauth - ssl - async + - name: test-latest-sharded_cluster-noauth-ssl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: latest + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: ssl + - func: run tests + vars: + AUTH: noauth + SSL: ssl + SYNC: sync_async + TEST_SUITES: "" + tags: + - latest + - sharded_cluster + - noauth + - ssl + - sync_async - name: test-latest-sharded_cluster-noauth-nossl-sync commands: - func: bootstrap mongo-orchestration @@ -2927,3 +4347,23 @@ tasks: - noauth - nossl - async + - name: test-latest-sharded_cluster-noauth-nossl-sync_async + commands: + - func: bootstrap mongo-orchestration + vars: + VERSION: latest + TOPOLOGY: sharded_cluster + AUTH: noauth + SSL: nossl + - func: run tests + vars: + AUTH: noauth + SSL: nossl + SYNC: sync_async + TEST_SUITES: "" + tags: + - latest + - sharded_cluster + - noauth + - nossl + - sync_async diff --git a/.evergreen/generated_configs/variants.yml b/.evergreen/generated_configs/variants.yml index f9a452b224..240b237fdc 100644 --- a/.evergreen/generated_configs/variants.yml +++ b/.evergreen/generated_configs/variants.yml @@ -2,7 +2,7 @@ buildvariants: # Alternative hosts tests - name: openssl-1.0.2-rhel7-py3.9 tasks: - - name: .5.0 .standalone + - name: .5.0 .standalone !.sync_async display_name: OpenSSL 1.0.2 RHEL7 py3.9 run_on: - rhel79-small @@ -12,7 +12,7 @@ buildvariants: PYTHON_BINARY: /opt/python/3.9/bin/python3 - name: other-hosts-rhel9-fips tasks: - - name: .6.0 .standalone + - name: .6.0 .standalone !.sync_async display_name: Other hosts RHEL9-FIPS run_on: - rhel92-fips @@ -21,7 +21,7 @@ buildvariants: SKIP_HATCH: "true" - name: other-hosts-rhel8-zseries tasks: - - name: .6.0 .standalone + - name: .6.0 .standalone !.sync_async display_name: Other hosts RHEL8-zseries run_on: - rhel8-zseries-small @@ -30,7 +30,7 @@ buildvariants: SKIP_HATCH: "true" - name: other-hosts-rhel8-power8 tasks: - - name: .6.0 .standalone + - name: .6.0 .standalone !.sync_async display_name: Other hosts RHEL8-POWER8 run_on: - rhel8-power-small @@ -39,7 +39,7 @@ buildvariants: SKIP_HATCH: "true" - name: other-hosts-rhel8-arm64 tasks: - - name: .6.0 .standalone + - name: .6.0 .standalone !.sync_async display_name: Other hosts RHEL8-arm64 run_on: - rhel82-arm64-small @@ -198,7 +198,7 @@ buildvariants: # Compression tests - name: snappy-compression-rhel8-py3.9-no-c tasks: - - name: .standalone .noauth .nossl + - name: .standalone .noauth .nossl .sync_async display_name: snappy compression RHEL8 py3.9 No C run_on: - rhel87-small @@ -208,7 +208,7 @@ buildvariants: PYTHON_BINARY: /opt/python/3.9/bin/python3 - name: snappy-compression-rhel8-py3.10 tasks: - - name: .standalone .noauth .nossl + - name: .standalone .noauth .nossl .sync_async display_name: snappy compression RHEL8 py3.10 run_on: - rhel87-small @@ -217,7 +217,7 @@ buildvariants: PYTHON_BINARY: /opt/python/3.10/bin/python3 - name: zlib-compression-rhel8-py3.11-no-c tasks: - - name: .standalone .noauth .nossl + - name: .standalone .noauth .nossl .sync_async display_name: zlib compression RHEL8 py3.11 No C run_on: - rhel87-small @@ -227,7 +227,7 @@ buildvariants: PYTHON_BINARY: /opt/python/3.11/bin/python3 - name: zlib-compression-rhel8-py3.12 tasks: - - name: .standalone .noauth .nossl + - name: 
.standalone .noauth .nossl .sync_async display_name: zlib compression RHEL8 py3.12 run_on: - rhel87-small @@ -236,7 +236,7 @@ buildvariants: PYTHON_BINARY: /opt/python/3.12/bin/python3 - name: zstd-compression-rhel8-py3.13-no-c tasks: - - name: .standalone .noauth .nossl !.4.0 + - name: .standalone .noauth .nossl .sync_async !.4.0 display_name: zstd compression RHEL8 py3.13 No C run_on: - rhel87-small @@ -246,7 +246,7 @@ buildvariants: PYTHON_BINARY: /opt/python/3.13/bin/python3 - name: zstd-compression-rhel8-py3.9 tasks: - - name: .standalone .noauth .nossl !.4.0 + - name: .standalone .noauth .nossl .sync_async !.4.0 display_name: zstd compression RHEL8 py3.9 run_on: - rhel87-small @@ -255,7 +255,7 @@ buildvariants: PYTHON_BINARY: /opt/python/3.9/bin/python3 - name: snappy-compression-rhel8-pypy3.9 tasks: - - name: .standalone .noauth .nossl + - name: .standalone .noauth .nossl .sync_async display_name: snappy compression RHEL8 pypy3.9 run_on: - rhel87-small @@ -264,7 +264,7 @@ buildvariants: PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 - name: zlib-compression-rhel8-pypy3.10 tasks: - - name: .standalone .noauth .nossl + - name: .standalone .noauth .nossl .sync_async display_name: zlib compression RHEL8 pypy3.10 run_on: - rhel87-small @@ -273,7 +273,7 @@ buildvariants: PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 - name: zstd-compression-rhel8-pypy3.9 tasks: - - name: .standalone .noauth .nossl !.4.0 + - name: .standalone .noauth .nossl .sync_async !.4.0 display_name: zstd compression RHEL8 pypy3.9 run_on: - rhel87-small @@ -284,7 +284,7 @@ buildvariants: # Disable test commands tests - name: disable-test-commands-rhel8-py3.9 tasks: - - name: .latest + - name: .latest .sync_async display_name: Disable test commands RHEL8 py3.9 run_on: - rhel87-small @@ -307,9 +307,9 @@ buildvariants: # Encryption tests - name: encryption-rhel8-py3.9 tasks: - - name: .sharded_cluster .auth .ssl - - name: .replica_set .noauth .ssl - - name: .standalone .noauth .nossl + - name: .sharded_cluster .auth .ssl .sync_async + - name: .replica_set .noauth .ssl .sync_async + - name: .standalone .noauth .nossl .sync_async display_name: Encryption RHEL8 py3.9 run_on: - rhel87-small @@ -320,9 +320,9 @@ buildvariants: tags: [encryption_tag] - name: encryption-rhel8-py3.13 tasks: - - name: .sharded_cluster .auth .ssl - - name: .replica_set .noauth .ssl - - name: .standalone .noauth .nossl + - name: .sharded_cluster .auth .ssl .sync_async + - name: .replica_set .noauth .ssl .sync_async + - name: .standalone .noauth .nossl .sync_async display_name: Encryption RHEL8 py3.13 run_on: - rhel87-small @@ -333,9 +333,9 @@ buildvariants: tags: [encryption_tag] - name: encryption-rhel8-pypy3.10 tasks: - - name: .sharded_cluster .auth .ssl - - name: .replica_set .noauth .ssl - - name: .standalone .noauth .nossl + - name: .sharded_cluster .auth .ssl .sync_async + - name: .replica_set .noauth .ssl .sync_async + - name: .standalone .noauth .nossl .sync_async display_name: Encryption RHEL8 pypy3.10 run_on: - rhel87-small @@ -346,9 +346,9 @@ buildvariants: tags: [encryption_tag] - name: encryption-crypt_shared-rhel8-py3.9 tasks: - - name: .sharded_cluster .auth .ssl - - name: .replica_set .noauth .ssl - - name: .standalone .noauth .nossl + - name: .sharded_cluster .auth .ssl .sync_async + - name: .replica_set .noauth .ssl .sync_async + - name: .standalone .noauth .nossl .sync_async display_name: Encryption crypt_shared RHEL8 py3.9 run_on: - rhel87-small @@ -360,9 +360,9 @@ buildvariants: tags: [encryption_tag] - name: 
encryption-crypt_shared-rhel8-py3.13 tasks: - - name: .sharded_cluster .auth .ssl - - name: .replica_set .noauth .ssl - - name: .standalone .noauth .nossl + - name: .sharded_cluster .auth .ssl .sync_async + - name: .replica_set .noauth .ssl .sync_async + - name: .standalone .noauth .nossl .sync_async display_name: Encryption crypt_shared RHEL8 py3.13 run_on: - rhel87-small @@ -374,9 +374,9 @@ buildvariants: tags: [encryption_tag] - name: encryption-crypt_shared-rhel8-pypy3.10 tasks: - - name: .sharded_cluster .auth .ssl - - name: .replica_set .noauth .ssl - - name: .standalone .noauth .nossl + - name: .sharded_cluster .auth .ssl .sync_async + - name: .replica_set .noauth .ssl .sync_async + - name: .standalone .noauth .nossl .sync_async display_name: Encryption crypt_shared RHEL8 pypy3.10 run_on: - rhel87-small @@ -388,9 +388,9 @@ buildvariants: tags: [encryption_tag] - name: encryption-pyopenssl-rhel8-py3.9 tasks: - - name: .sharded_cluster .auth .ssl - - name: .replica_set .noauth .ssl - - name: .standalone .noauth .nossl + - name: .sharded_cluster .auth .ssl .sync_async + - name: .replica_set .noauth .ssl .sync_async + - name: .standalone .noauth .nossl .sync_async display_name: Encryption PyOpenSSL RHEL8 py3.9 run_on: - rhel87-small @@ -402,9 +402,9 @@ buildvariants: tags: [encryption_tag] - name: encryption-pyopenssl-rhel8-py3.13 tasks: - - name: .sharded_cluster .auth .ssl - - name: .replica_set .noauth .ssl - - name: .standalone .noauth .nossl + - name: .sharded_cluster .auth .ssl .sync_async + - name: .replica_set .noauth .ssl .sync_async + - name: .standalone .noauth .nossl .sync_async display_name: Encryption PyOpenSSL RHEL8 py3.13 run_on: - rhel87-small @@ -416,9 +416,9 @@ buildvariants: tags: [encryption_tag] - name: encryption-pyopenssl-rhel8-pypy3.10 tasks: - - name: .sharded_cluster .auth .ssl - - name: .replica_set .noauth .ssl - - name: .standalone .noauth .nossl + - name: .sharded_cluster .auth .ssl .sync_async + - name: .replica_set .noauth .ssl .sync_async + - name: .standalone .noauth .nossl .sync_async display_name: Encryption PyOpenSSL RHEL8 pypy3.10 run_on: - rhel87-small @@ -430,7 +430,7 @@ buildvariants: tags: [encryption_tag] - name: encryption-rhel8-py3.10 tasks: - - name: .sharded_cluster .auth .ssl + - name: .sharded_cluster .auth .ssl .sync_async display_name: Encryption RHEL8 py3.10 run_on: - rhel87-small @@ -439,7 +439,7 @@ buildvariants: PYTHON_BINARY: /opt/python/3.10/bin/python3 - name: encryption-crypt_shared-rhel8-py3.11 tasks: - - name: .replica_set .noauth .ssl + - name: .replica_set .noauth .ssl .sync_async display_name: Encryption crypt_shared RHEL8 py3.11 run_on: - rhel87-small @@ -449,7 +449,7 @@ buildvariants: PYTHON_BINARY: /opt/python/3.11/bin/python3 - name: encryption-pyopenssl-rhel8-py3.12 tasks: - - name: .standalone .noauth .nossl + - name: .standalone .noauth .nossl .sync_async display_name: Encryption PyOpenSSL RHEL8 py3.12 run_on: - rhel87-small @@ -459,7 +459,7 @@ buildvariants: PYTHON_BINARY: /opt/python/3.12/bin/python3 - name: encryption-rhel8-pypy3.9 tasks: - - name: .sharded_cluster .auth .ssl + - name: .sharded_cluster .auth .ssl .sync_async display_name: Encryption RHEL8 pypy3.9 run_on: - rhel87-small @@ -468,7 +468,7 @@ buildvariants: PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 - name: encryption-macos-py3.9 tasks: - - name: .latest .replica_set + - name: .latest .replica_set .sync_async display_name: Encryption macOS py3.9 run_on: - macos-14 @@ -479,7 +479,7 @@ buildvariants: tags: [encryption_tag] - name: 
encryption-macos-py3.13 tasks: - - name: .latest .replica_set + - name: .latest .replica_set .sync_async display_name: Encryption macOS py3.13 run_on: - macos-14 @@ -490,7 +490,7 @@ buildvariants: tags: [encryption_tag] - name: encryption-crypt_shared-macos-py3.9 tasks: - - name: .latest .replica_set + - name: .latest .replica_set .sync_async display_name: Encryption crypt_shared macOS py3.9 run_on: - macos-14 @@ -502,7 +502,7 @@ buildvariants: tags: [encryption_tag] - name: encryption-crypt_shared-macos-py3.13 tasks: - - name: .latest .replica_set + - name: .latest .replica_set .sync_async display_name: Encryption crypt_shared macOS py3.13 run_on: - macos-14 @@ -514,7 +514,7 @@ buildvariants: tags: [encryption_tag] - name: encryption-win64-py3.9 tasks: - - name: .latest .replica_set + - name: .latest .replica_set .sync_async display_name: Encryption Win64 py3.9 run_on: - windows-64-vsMulti-small @@ -525,7 +525,7 @@ buildvariants: tags: [encryption_tag] - name: encryption-win64-py3.13 tasks: - - name: .latest .replica_set + - name: .latest .replica_set .sync_async display_name: Encryption Win64 py3.13 run_on: - windows-64-vsMulti-small @@ -536,7 +536,7 @@ buildvariants: tags: [encryption_tag] - name: encryption-crypt_shared-win64-py3.9 tasks: - - name: .latest .replica_set + - name: .latest .replica_set .sync_async display_name: Encryption crypt_shared Win64 py3.9 run_on: - windows-64-vsMulti-small @@ -548,7 +548,7 @@ buildvariants: tags: [encryption_tag] - name: encryption-crypt_shared-win64-py3.13 tasks: - - name: .latest .replica_set + - name: .latest .replica_set .sync_async display_name: Encryption crypt_shared Win64 py3.13 run_on: - windows-64-vsMulti-small @@ -627,7 +627,7 @@ buildvariants: # Green framework tests - name: eventlet-rhel8-py3.9 tasks: - - name: .standalone .noauth .nossl + - name: .standalone .noauth .nossl .sync_async display_name: Eventlet RHEL8 py3.9 run_on: - rhel87-small @@ -638,7 +638,7 @@ buildvariants: PYTHON_BINARY: /opt/python/3.9/bin/python3 - name: gevent-rhel8-py3.9 tasks: - - name: .standalone .noauth .nossl + - name: .standalone .noauth .nossl .sync_async display_name: Gevent RHEL8 py3.9 run_on: - rhel87-small @@ -649,7 +649,7 @@ buildvariants: PYTHON_BINARY: /opt/python/3.9/bin/python3 - name: eventlet-rhel8-py3.12 tasks: - - name: .standalone .noauth .nossl + - name: .standalone .noauth .nossl .sync_async display_name: Eventlet RHEL8 py3.12 run_on: - rhel87-small @@ -660,7 +660,7 @@ buildvariants: PYTHON_BINARY: /opt/python/3.12/bin/python3 - name: gevent-rhel8-py3.12 tasks: - - name: .standalone .noauth .nossl + - name: .standalone .noauth .nossl .sync_async display_name: Gevent RHEL8 py3.12 run_on: - rhel87-small @@ -761,7 +761,7 @@ buildvariants: # No c ext tests - name: no-c-ext-rhel8-py3.9 tasks: - - name: .standalone .noauth .nossl + - name: .standalone .noauth .nossl .sync_async display_name: No C Ext RHEL8 py3.9 run_on: - rhel87-small @@ -770,7 +770,7 @@ buildvariants: PYTHON_BINARY: /opt/python/3.9/bin/python3 - name: no-c-ext-rhel8-py3.10 tasks: - - name: .replica_set .noauth .nossl + - name: .replica_set .noauth .nossl .sync_async display_name: No C Ext RHEL8 py3.10 run_on: - rhel87-small @@ -779,7 +779,7 @@ buildvariants: PYTHON_BINARY: /opt/python/3.10/bin/python3 - name: no-c-ext-rhel8-py3.11 tasks: - - name: .sharded_cluster .noauth .nossl + - name: .sharded_cluster .noauth .nossl .sync_async display_name: No C Ext RHEL8 py3.11 run_on: - rhel87-small @@ -788,7 +788,7 @@ buildvariants: PYTHON_BINARY: /opt/python/3.11/bin/python3 - name: 
no-c-ext-rhel8-py3.12 tasks: - - name: .standalone .noauth .nossl + - name: .standalone .noauth .nossl .sync_async display_name: No C Ext RHEL8 py3.12 run_on: - rhel87-small @@ -797,7 +797,7 @@ buildvariants: PYTHON_BINARY: /opt/python/3.12/bin/python3 - name: no-c-ext-rhel8-py3.13 tasks: - - name: .replica_set .noauth .nossl + - name: .replica_set .noauth .nossl .sync_async display_name: No C Ext RHEL8 py3.13 run_on: - rhel87-small @@ -976,8 +976,8 @@ buildvariants: # Pyopenssl tests - name: pyopenssl-macos-py3.9 tasks: - - name: .replica_set .noauth .nossl - - name: .7.0 .noauth .nossl + - name: .replica_set .noauth .nossl .sync_async + - name: .7.0 .noauth .nossl .sync_async display_name: PyOpenSSL macOS py3.9 run_on: - macos-14 @@ -987,8 +987,8 @@ buildvariants: PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 - name: pyopenssl-rhel8-py3.10 tasks: - - name: .replica_set .auth .ssl - - name: .7.0 .auth .ssl + - name: .replica_set .auth .ssl .sync_async + - name: .7.0 .auth .ssl .sync_async display_name: PyOpenSSL RHEL8 py3.10 run_on: - rhel87-small @@ -998,8 +998,8 @@ buildvariants: PYTHON_BINARY: /opt/python/3.10/bin/python3 - name: pyopenssl-rhel8-py3.11 tasks: - - name: .replica_set .auth .ssl - - name: .7.0 .auth .ssl + - name: .replica_set .auth .ssl .sync_async + - name: .7.0 .auth .ssl .sync_async display_name: PyOpenSSL RHEL8 py3.11 run_on: - rhel87-small @@ -1009,8 +1009,8 @@ buildvariants: PYTHON_BINARY: /opt/python/3.11/bin/python3 - name: pyopenssl-rhel8-py3.12 tasks: - - name: .replica_set .auth .ssl - - name: .7.0 .auth .ssl + - name: .replica_set .auth .ssl .sync_async + - name: .7.0 .auth .ssl .sync_async display_name: PyOpenSSL RHEL8 py3.12 run_on: - rhel87-small @@ -1020,8 +1020,8 @@ buildvariants: PYTHON_BINARY: /opt/python/3.12/bin/python3 - name: pyopenssl-win64-py3.13 tasks: - - name: .replica_set .auth .ssl - - name: .7.0 .auth .ssl + - name: .replica_set .auth .ssl .sync_async + - name: .7.0 .auth .ssl .sync_async display_name: PyOpenSSL Win64 py3.13 run_on: - windows-64-vsMulti-small @@ -1031,8 +1031,8 @@ buildvariants: PYTHON_BINARY: C:/python/Python313/python.exe - name: pyopenssl-rhel8-pypy3.9 tasks: - - name: .replica_set .auth .ssl - - name: .7.0 .auth .ssl + - name: .replica_set .auth .ssl .sync_async + - name: .7.0 .auth .ssl .sync_async display_name: PyOpenSSL RHEL8 pypy3.9 run_on: - rhel87-small @@ -1042,8 +1042,8 @@ buildvariants: PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 - name: pyopenssl-rhel8-pypy3.10 tasks: - - name: .replica_set .auth .ssl - - name: .7.0 .auth .ssl + - name: .replica_set .auth .ssl .sync_async + - name: .7.0 .auth .ssl .sync_async display_name: PyOpenSSL RHEL8 pypy3.10 run_on: - rhel87-small @@ -1065,9 +1065,9 @@ buildvariants: # Server tests - name: test-rhel8-py3.9-cov tasks: - - name: .standalone - - name: .replica_set - - name: .sharded_cluster + - name: .standalone .sync_async + - name: .replica_set .sync_async + - name: .sharded_cluster .sync_async display_name: Test RHEL8 py3.9 cov run_on: - rhel87-small @@ -1077,9 +1077,9 @@ buildvariants: tags: [coverage_tag] - name: test-rhel8-py3.13-cov tasks: - - name: .standalone - - name: .replica_set - - name: .sharded_cluster + - name: .standalone .sync_async + - name: .replica_set .sync_async + - name: .sharded_cluster .sync_async display_name: Test RHEL8 py3.13 cov run_on: - rhel87-small @@ -1089,9 +1089,9 @@ buildvariants: tags: [coverage_tag] - name: test-rhel8-pypy3.10-cov tasks: - - name: .standalone - - name: .replica_set - - name: 
.sharded_cluster + - name: .standalone .sync_async + - name: .replica_set .sync_async + - name: .sharded_cluster .sync_async display_name: Test RHEL8 pypy3.10 cov run_on: - rhel87-small @@ -1101,9 +1101,9 @@ buildvariants: tags: [coverage_tag] - name: test-rhel8-py3.10 tasks: - - name: .sharded_cluster .auth .ssl - - name: .replica_set .noauth .ssl - - name: .standalone .noauth .nossl + - name: .sharded_cluster .auth .ssl .sync_async + - name: .replica_set .noauth .ssl .sync_async + - name: .standalone .noauth .nossl .sync_async display_name: Test RHEL8 py3.10 run_on: - rhel87-small @@ -1112,9 +1112,9 @@ buildvariants: PYTHON_BINARY: /opt/python/3.10/bin/python3 - name: test-rhel8-py3.11 tasks: - - name: .sharded_cluster .auth .ssl - - name: .replica_set .noauth .ssl - - name: .standalone .noauth .nossl + - name: .sharded_cluster .auth .ssl .sync_async + - name: .replica_set .noauth .ssl .sync_async + - name: .standalone .noauth .nossl .sync_async display_name: Test RHEL8 py3.11 run_on: - rhel87-small @@ -1123,9 +1123,9 @@ buildvariants: PYTHON_BINARY: /opt/python/3.11/bin/python3 - name: test-rhel8-py3.12 tasks: - - name: .sharded_cluster .auth .ssl - - name: .replica_set .noauth .ssl - - name: .standalone .noauth .nossl + - name: .sharded_cluster .auth .ssl .sync_async + - name: .replica_set .noauth .ssl .sync_async + - name: .standalone .noauth .nossl .sync_async display_name: Test RHEL8 py3.12 run_on: - rhel87-small @@ -1134,9 +1134,9 @@ buildvariants: PYTHON_BINARY: /opt/python/3.12/bin/python3 - name: test-rhel8-pypy3.9 tasks: - - name: .sharded_cluster .auth .ssl - - name: .replica_set .noauth .ssl - - name: .standalone .noauth .nossl + - name: .sharded_cluster .auth .ssl .sync_async + - name: .replica_set .noauth .ssl .sync_async + - name: .standalone .noauth .nossl .sync_async display_name: Test RHEL8 pypy3.9 run_on: - rhel87-small @@ -1145,9 +1145,9 @@ buildvariants: PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 - name: test-macos-py3.9 tasks: - - name: .sharded_cluster .auth .ssl - - name: .replica_set .noauth .ssl - - name: .standalone .noauth .nossl + - name: .sharded_cluster .auth .ssl !.sync_async + - name: .replica_set .noauth .ssl !.sync_async + - name: .standalone .noauth .nossl !.sync_async display_name: Test macOS py3.9 run_on: - macos-14 @@ -1156,9 +1156,9 @@ buildvariants: PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 - name: test-macos-py3.13 tasks: - - name: .sharded_cluster .auth .ssl - - name: .replica_set .noauth .ssl - - name: .standalone .noauth .nossl + - name: .sharded_cluster .auth .ssl !.sync_async + - name: .replica_set .noauth .ssl !.sync_async + - name: .standalone .noauth .nossl !.sync_async display_name: Test macOS py3.13 run_on: - macos-14 @@ -1167,21 +1167,21 @@ buildvariants: PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 - name: test-macos-arm64-py3.9 tasks: - - name: .sharded_cluster .auth .ssl .6.0 - - name: .replica_set .noauth .ssl .6.0 - - name: .standalone .noauth .nossl .6.0 - - name: .sharded_cluster .auth .ssl .7.0 - - name: .replica_set .noauth .ssl .7.0 - - name: .standalone .noauth .nossl .7.0 - - name: .sharded_cluster .auth .ssl .8.0 - - name: .replica_set .noauth .ssl .8.0 - - name: .standalone .noauth .nossl .8.0 - - name: .sharded_cluster .auth .ssl .rapid - - name: .replica_set .noauth .ssl .rapid - - name: .standalone .noauth .nossl .rapid - - name: .sharded_cluster .auth .ssl .latest - - name: .replica_set .noauth .ssl .latest - - name: .standalone .noauth .nossl 
.latest + - name: .sharded_cluster .auth .ssl .6.0 !.sync_async + - name: .replica_set .noauth .ssl .6.0 !.sync_async + - name: .standalone .noauth .nossl .6.0 !.sync_async + - name: .sharded_cluster .auth .ssl .7.0 !.sync_async + - name: .replica_set .noauth .ssl .7.0 !.sync_async + - name: .standalone .noauth .nossl .7.0 !.sync_async + - name: .sharded_cluster .auth .ssl .8.0 !.sync_async + - name: .replica_set .noauth .ssl .8.0 !.sync_async + - name: .standalone .noauth .nossl .8.0 !.sync_async + - name: .sharded_cluster .auth .ssl .rapid !.sync_async + - name: .replica_set .noauth .ssl .rapid !.sync_async + - name: .standalone .noauth .nossl .rapid !.sync_async + - name: .sharded_cluster .auth .ssl .latest !.sync_async + - name: .replica_set .noauth .ssl .latest !.sync_async + - name: .standalone .noauth .nossl .latest !.sync_async display_name: Test macOS Arm64 py3.9 run_on: - macos-14-arm64 @@ -1190,21 +1190,21 @@ buildvariants: PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 - name: test-macos-arm64-py3.13 tasks: - - name: .sharded_cluster .auth .ssl .6.0 - - name: .replica_set .noauth .ssl .6.0 - - name: .standalone .noauth .nossl .6.0 - - name: .sharded_cluster .auth .ssl .7.0 - - name: .replica_set .noauth .ssl .7.0 - - name: .standalone .noauth .nossl .7.0 - - name: .sharded_cluster .auth .ssl .8.0 - - name: .replica_set .noauth .ssl .8.0 - - name: .standalone .noauth .nossl .8.0 - - name: .sharded_cluster .auth .ssl .rapid - - name: .replica_set .noauth .ssl .rapid - - name: .standalone .noauth .nossl .rapid - - name: .sharded_cluster .auth .ssl .latest - - name: .replica_set .noauth .ssl .latest - - name: .standalone .noauth .nossl .latest + - name: .sharded_cluster .auth .ssl .6.0 !.sync_async + - name: .replica_set .noauth .ssl .6.0 !.sync_async + - name: .standalone .noauth .nossl .6.0 !.sync_async + - name: .sharded_cluster .auth .ssl .7.0 !.sync_async + - name: .replica_set .noauth .ssl .7.0 !.sync_async + - name: .standalone .noauth .nossl .7.0 !.sync_async + - name: .sharded_cluster .auth .ssl .8.0 !.sync_async + - name: .replica_set .noauth .ssl .8.0 !.sync_async + - name: .standalone .noauth .nossl .8.0 !.sync_async + - name: .sharded_cluster .auth .ssl .rapid !.sync_async + - name: .replica_set .noauth .ssl .rapid !.sync_async + - name: .standalone .noauth .nossl .rapid !.sync_async + - name: .sharded_cluster .auth .ssl .latest !.sync_async + - name: .replica_set .noauth .ssl .latest !.sync_async + - name: .standalone .noauth .nossl .latest !.sync_async display_name: Test macOS Arm64 py3.13 run_on: - macos-14-arm64 @@ -1213,9 +1213,9 @@ buildvariants: PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 - name: test-win64-py3.9 tasks: - - name: .sharded_cluster .auth .ssl - - name: .replica_set .noauth .ssl - - name: .standalone .noauth .nossl + - name: .sharded_cluster .auth .ssl !.sync_async + - name: .replica_set .noauth .ssl !.sync_async + - name: .standalone .noauth .nossl !.sync_async display_name: Test Win64 py3.9 run_on: - windows-64-vsMulti-small @@ -1224,9 +1224,9 @@ buildvariants: PYTHON_BINARY: C:/python/Python39/python.exe - name: test-win64-py3.13 tasks: - - name: .sharded_cluster .auth .ssl - - name: .replica_set .noauth .ssl - - name: .standalone .noauth .nossl + - name: .sharded_cluster .auth .ssl !.sync_async + - name: .replica_set .noauth .ssl !.sync_async + - name: .standalone .noauth .nossl !.sync_async display_name: Test Win64 py3.13 run_on: - windows-64-vsMulti-small @@ -1235,9 +1235,9 @@ 
buildvariants: PYTHON_BINARY: C:/python/Python313/python.exe - name: test-win32-py3.9 tasks: - - name: .sharded_cluster .auth .ssl - - name: .replica_set .noauth .ssl - - name: .standalone .noauth .nossl + - name: .sharded_cluster .auth .ssl !.sync_async + - name: .replica_set .noauth .ssl !.sync_async + - name: .standalone .noauth .nossl !.sync_async display_name: Test Win32 py3.9 run_on: - windows-64-vsMulti-small @@ -1246,9 +1246,9 @@ buildvariants: PYTHON_BINARY: C:/python/32/Python39/python.exe - name: test-win32-py3.13 tasks: - - name: .sharded_cluster .auth .ssl - - name: .replica_set .noauth .ssl - - name: .standalone .noauth .nossl + - name: .sharded_cluster .auth .ssl !.sync_async + - name: .replica_set .noauth .ssl !.sync_async + - name: .standalone .noauth .nossl !.sync_async display_name: Test Win32 py3.13 run_on: - windows-64-vsMulti-small diff --git a/.evergreen/scripts/generate_config.py b/.evergreen/scripts/generate_config.py index eefd04b040..59760e7664 100644 --- a/.evergreen/scripts/generate_config.py +++ b/.evergreen/scripts/generate_config.py @@ -41,7 +41,7 @@ ".replica_set .noauth .ssl", ".standalone .noauth .nossl", ] -SYNCS = ["sync", "async"] +SYNCS = ["sync", "async", "sync_async"] DISPLAY_LOOKUP = dict( ssl=dict(ssl="SSL", nossl="NoSSL"), auth=dict(auth="Auth", noauth="NoAuth"), @@ -246,7 +246,7 @@ def create_server_variants() -> list[BuildVariant]: expansions = dict(COVERAGE="coverage") display_name = get_display_name("Test", host, python=python, **expansions) variant = create_variant( - [f".{t}" for t in TOPOLOGIES], + [f".{t} .sync_async" for t in TOPOLOGIES], display_name, python=python, host=host, @@ -260,7 +260,7 @@ def create_server_variants() -> list[BuildVariant]: display_name = f"Test {host}" display_name = get_display_name("Test", host, python=python) variant = create_variant( - SUB_TASKS, + [f"{t} .sync_async" for t in SUB_TASKS], display_name, python=python, host=host, @@ -271,12 +271,12 @@ def create_server_variants() -> list[BuildVariant]: # Test a subset on each of the other platforms. for host in ("macos", "macos-arm64", "win64", "win32"): for python in MIN_MAX_PYTHON: - tasks = SUB_TASKS + tasks = [f"{t} !.sync_async" for t in SUB_TASKS] # MacOS arm64 only works on server versions 6.0+ if host == "macos-arm64": tasks = [] for version in get_versions_from("6.0"): - tasks.extend(f"{t} .{version}" for t in SUB_TASKS) + tasks.extend(f"{t} .{version} !.sync_async" for t in SUB_TASKS) expansions = dict(SKIP_CSOT_TESTS="true") display_name = get_display_name("Test", host, python=python, **expansions) variant = create_variant( @@ -312,7 +312,7 @@ def get_encryption_expansions(encryption): expansions = get_encryption_expansions(encryption) display_name = get_display_name(encryption, host, python=python, **expansions) variant = create_variant( - SUB_TASKS, + [f"{t} .sync_async" for t in SUB_TASKS], display_name, python=python, host=host, @@ -327,7 +327,7 @@ def get_encryption_expansions(encryption): expansions = get_encryption_expansions(encryption) display_name = get_display_name(encryption, host, python=python, **expansions) variant = create_variant( - [task], + [f"{task} .sync_async"], display_name, python=python, host=host, @@ -337,7 +337,7 @@ def get_encryption_expansions(encryption): # Test on macos and linux on one server version and topology for min and max python. 
encryptions = ["Encryption", "Encryption crypt_shared"] - task_names = [".latest .replica_set"] + task_names = [".latest .replica_set .sync_async"] for host, encryption, python in product(["macos", "win64"], encryptions, MIN_MAX_PYTHON): expansions = get_encryption_expansions(encryption) display_name = get_display_name(encryption, host, python=python, **expansions) @@ -379,7 +379,7 @@ def create_compression_variants(): # Compression tests - standalone versions of each server, across python versions, with and without c extensions. # PyPy interpreters are always tested without extensions. host = "rhel8" - base_task = ".standalone .noauth .nossl" + base_task = ".standalone .noauth .nossl .sync_async" task_names = dict(snappy=[base_task], zlib=[base_task], zstd=[f"{base_task} !.4.0"]) variants = [] for ind, (compressor, c_ext) in enumerate(product(["snappy", "zlib", "zstd"], C_EXTS)): @@ -455,7 +455,7 @@ def create_pyopenssl_variants(): display_name = get_display_name(base_name, host, python=python) variant = create_variant( - [f".replica_set .{auth} .{ssl}", f".7.0 .{auth} .{ssl}"], + [f".replica_set .{auth} .{ssl} .sync_async", f".7.0 .{auth} .{ssl} .sync_async"], display_name, python=python, host=host, @@ -475,12 +475,12 @@ def create_storage_engine_tests(): python = CPYTHONS[0] expansions = dict(STORAGE_ENGINE=engine.lower()) if engine == engines[0]: - tasks = [f".standalone .noauth .nossl .{v}" for v in ALL_VERSIONS] + tasks = [f".standalone .noauth .nossl .{v} .sync_async" for v in ALL_VERSIONS] else: # MongoDB 4.2 drops support for MMAPv1 versions = get_versions_until("4.0") - tasks = [f".standalone .{v} .noauth .nossl" for v in versions] + [ - f".replica_set .{v} .noauth .nossl" for v in versions + tasks = [f".standalone .{v} .noauth .nossl .sync_async" for v in versions] + [ + f".replica_set .{v} .noauth .nossl .sync_async" for v in versions ] display_name = get_display_name(f"Storage {engine}", host, python=python) variant = create_variant( @@ -493,7 +493,7 @@ def create_storage_engine_tests(): def create_versioned_api_tests(): host = "rhel8" tags = ["versionedApi_tag"] - tasks = [f".standalone .{v} .noauth .nossl" for v in get_versions_from("5.0")] + tasks = [f".standalone .{v} .noauth .nossl .sync_async" for v in get_versions_from("5.0")] variants = [] types = ["require v1", "accept v2"] @@ -524,7 +524,7 @@ def create_versioned_api_tests(): def create_green_framework_variants(): variants = [] - tasks = [".standalone .noauth .nossl"] + tasks = [".standalone .noauth .nossl .sync_async"] host = "rhel8" for python, framework in product([CPYTHONS[0], CPYTHONS[-2]], ["eventlet", "gevent"]): expansions = dict(GREEN_FRAMEWORK=framework, AUTH="auth", SSL="ssl") @@ -540,7 +540,7 @@ def create_no_c_ext_variants(): variants = [] host = "rhel8" for python, topology in zip_cycle(CPYTHONS, TOPOLOGIES): - tasks = [f".{topology} .noauth .nossl"] + tasks = [f".{topology} .noauth .nossl .sync_async"] expansions = dict() handle_c_ext(C_EXTS[0], expansions) display_name = get_display_name("No C Ext", host, python=python) @@ -590,7 +590,7 @@ def create_disable_test_commands_variants(): expansions = dict(AUTH="auth", SSL="ssl", DISABLE_TEST_COMMANDS="1") python = CPYTHONS[0] display_name = get_display_name("Disable test commands", host, python=python) - tasks = [".latest"] + tasks = [".latest .sync_async"] return [create_variant(tasks, display_name, host=host, python=python, expansions=expansions)] @@ -717,7 +717,7 @@ def create_alternative_hosts_variants(): host = "rhel7" variants.append( 
create_variant( - [".5.0 .standalone"], + [".5.0 .standalone !.sync_async"], get_display_name("OpenSSL 1.0.2", "rhel7", python=CPYTHONS[0], **expansions), host=host, python=CPYTHONS[0], @@ -731,7 +731,7 @@ def create_alternative_hosts_variants(): for host, host_name in zip(hosts, host_names): variants.append( create_variant( - [".6.0 .standalone"], + [".6.0 .standalone !.sync_async"], display_name=get_display_name(f"Other hosts {host_name}", **expansions), expansions=expansions, batchtime=batchtime, @@ -758,11 +758,16 @@ def create_server_tasks(): SSL=ssl, ) bootstrap_func = FunctionCall(func="bootstrap mongo-orchestration", vars=bootstrap_vars) + test_suites = "" + if sync == "sync": + test_suites = "default" + elif sync == "async": + test_suites = "default_async" test_vars = dict( AUTH=auth, SSL=ssl, SYNC=sync, - TEST_SUITES="default" if sync == "sync" else "default_async", + TEST_SUITES=test_suites, ) test_func = FunctionCall(func="run tests", vars=test_vars) tasks.append(EvgTask(name=name, tags=tags, commands=[bootstrap_func, test_func])) From 5c1c24101802a3827260246a06e018b3c70202d0 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Tue, 29 Oct 2024 08:25:23 -0500 Subject: [PATCH 075/182] PYTHON-4909 Use ubuntu for Atlas Data Lake tests (#1969) --- .evergreen/generated_configs/variants.yml | 28 +++++++++++++---------- .evergreen/scripts/generate_config.py | 4 ++-- 2 files changed, 18 insertions(+), 14 deletions(-) diff --git a/.evergreen/generated_configs/variants.yml b/.evergreen/generated_configs/variants.yml index 240b237fdc..0a4e5cfb14 100644 --- a/.evergreen/generated_configs/variants.yml +++ b/.evergreen/generated_configs/variants.yml @@ -66,39 +66,43 @@ buildvariants: PYTHON_BINARY: /opt/python/3.13/bin/python3 # Atlas data lake tests - - name: atlas-data-lake-rhel8-py3.9-no-c + - name: atlas-data-lake-ubuntu-22-py3.9-auth-no-c tasks: - name: atlas-data-lake-tests - display_name: Atlas Data Lake RHEL8 py3.9 No C + display_name: Atlas Data Lake Ubuntu-22 py3.9 Auth No C run_on: - - rhel87-small + - ubuntu2204-small expansions: + AUTH: auth NO_EXT: "1" PYTHON_BINARY: /opt/python/3.9/bin/python3 - - name: atlas-data-lake-rhel8-py3.9 + - name: atlas-data-lake-ubuntu-22-py3.9-auth tasks: - name: atlas-data-lake-tests - display_name: Atlas Data Lake RHEL8 py3.9 + display_name: Atlas Data Lake Ubuntu-22 py3.9 Auth run_on: - - rhel87-small + - ubuntu2204-small expansions: + AUTH: auth PYTHON_BINARY: /opt/python/3.9/bin/python3 - - name: atlas-data-lake-rhel8-py3.13-no-c + - name: atlas-data-lake-ubuntu-22-py3.13-auth-no-c tasks: - name: atlas-data-lake-tests - display_name: Atlas Data Lake RHEL8 py3.13 No C + display_name: Atlas Data Lake Ubuntu-22 py3.13 Auth No C run_on: - - rhel87-small + - ubuntu2204-small expansions: + AUTH: auth NO_EXT: "1" PYTHON_BINARY: /opt/python/3.13/bin/python3 - - name: atlas-data-lake-rhel8-py3.13 + - name: atlas-data-lake-ubuntu-22-py3.13-auth tasks: - name: atlas-data-lake-tests - display_name: Atlas Data Lake RHEL8 py3.13 + display_name: Atlas Data Lake Ubuntu-22 py3.13 Auth run_on: - - rhel87-small + - ubuntu2204-small expansions: + AUTH: auth PYTHON_BINARY: /opt/python/3.13/bin/python3 # Aws auth tests diff --git a/.evergreen/scripts/generate_config.py b/.evergreen/scripts/generate_config.py index 59760e7664..9abcc6516a 100644 --- a/.evergreen/scripts/generate_config.py +++ b/.evergreen/scripts/generate_config.py @@ -553,10 +553,10 @@ def create_no_c_ext_variants(): def create_atlas_data_lake_variants(): variants = [] - host = "rhel8" + host = 
"ubuntu22" for python, c_ext in product(MIN_MAX_PYTHON, C_EXTS): tasks = ["atlas-data-lake-tests"] - expansions = dict() + expansions = dict(AUTH="auth") handle_c_ext(c_ext, expansions) display_name = get_display_name("Atlas Data Lake", host, python=python, **expansions) variant = create_variant( From 00c29600decda0081959d532fc8f882a16554fc0 Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Tue, 29 Oct 2024 11:34:06 -0400 Subject: [PATCH 076/182] PYTHON-4766 - Fix logic for determining whether to populate BulkWriteException.partialResult (#1980) --- test/crud/unified/client-bulkWrite-partialResults.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/crud/unified/client-bulkWrite-partialResults.json b/test/crud/unified/client-bulkWrite-partialResults.json index b35e94a2ea..1b75e37834 100644 --- a/test/crud/unified/client-bulkWrite-partialResults.json +++ b/test/crud/unified/client-bulkWrite-partialResults.json @@ -486,7 +486,7 @@ ] }, { - "description": "partialResult is set when first operation fails during an unordered bulk write (summary)", + "description": "partialResult is set when second operation fails during an unordered bulk write (summary)", "operations": [ { "object": "client0", From dfb6a9a4f337c780be832a2f6fe84fe292e24015 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Tue, 29 Oct 2024 11:08:22 -0500 Subject: [PATCH 077/182] PYTHON-4209 Ensure that no error is raised for unknown auth mechanism (#1981) --- test/mockupdb/test_handshake.py | 33 +++++++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/test/mockupdb/test_handshake.py b/test/mockupdb/test_handshake.py index 7cc3017c8f..752c4f8421 100644 --- a/test/mockupdb/test_handshake.py +++ b/test/mockupdb/test_handshake.py @@ -229,6 +229,39 @@ def test_client_handshake_saslSupportedMechs(self): future() return + def test_client_handshake_saslSupportedMechs_unknown(self): + server = MockupDB() + server.run() + self.addCleanup(server.stop) + + primary_response = OpReply( + "ismaster", + True, + minWireVersion=2, + maxWireVersion=MIN_SUPPORTED_WIRE_VERSION, + saslSupportedMechs=["SCRAM-SHA-256", "does_not_exist"], + ) + client = MongoClient( + server.uri, authmechanism="PLAIN", username="username", password="password" + ) + + self.addCleanup(client.close) + + # New monitoring connections send data during handshake. 
+ heartbeat = server.receives("ismaster") + heartbeat.ok(primary_response) + + future = go(client.db.command, "whatever") + for request in server: + if request.matches("ismaster"): + request.ok(primary_response) + elif request.matches("saslStart"): + request.ok("saslStart", True, conversationId=1, payload=b"", done=True, ok=1) + else: + request.ok() + future() + return + def test_handshake_load_balanced(self): self.hello_with_option_helper(OpMsg, loadBalanced=True) with self.assertRaisesRegex(AssertionError, "does not match"): From 2f1227c504064d6859cb05d567999262f99b7937 Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Tue, 29 Oct 2024 12:28:33 -0400 Subject: [PATCH 078/182] =?UTF-8?q?PYTHON-4807=20-=20Specify=20how=20to=20?= =?UTF-8?q?handle=20unacknowledged+(ordered|verbose|m=E2=80=A6=20(#1979)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- pymongo/asynchronous/mongo_client.py | 7 +++ pymongo/synchronous/mongo_client.py | 7 +++ test/asynchronous/test_client_bulk_write.py | 46 ++++++++++++++- .../unacknowledged-client-bulkWrite.json | 5 +- .../crud/unified/client-bulkWrite-errors.json | 58 +++++++++++++++++++ test/test_client_bulk_write.py | 42 +++++++++++++- 6 files changed, 159 insertions(+), 6 deletions(-) diff --git a/pymongo/asynchronous/mongo_client.py b/pymongo/asynchronous/mongo_client.py index 4e09efe401..a71e4cb5cd 100644 --- a/pymongo/asynchronous/mongo_client.py +++ b/pymongo/asynchronous/mongo_client.py @@ -2354,6 +2354,13 @@ async def bulk_write( if not write_concern: write_concern = self.write_concern + if write_concern and not write_concern.acknowledged and verbose_results: + raise InvalidOperation( + "Cannot request unacknowledged write concern and verbose results" + ) + elif write_concern and not write_concern.acknowledged and ordered: + raise InvalidOperation("Cannot request unacknowledged write concern and ordered writes") + common.validate_list("models", models) blk = _AsyncClientBulk( diff --git a/pymongo/synchronous/mongo_client.py b/pymongo/synchronous/mongo_client.py index 815446bb2c..24696f0c8e 100644 --- a/pymongo/synchronous/mongo_client.py +++ b/pymongo/synchronous/mongo_client.py @@ -2342,6 +2342,13 @@ def bulk_write( if not write_concern: write_concern = self.write_concern + if write_concern and not write_concern.acknowledged and verbose_results: + raise InvalidOperation( + "Cannot request unacknowledged write concern and verbose results" + ) + elif write_concern and not write_concern.acknowledged and ordered: + raise InvalidOperation("Cannot request unacknowledged write concern and ordered writes") + common.validate_list("models", models) blk = _ClientBulk( diff --git a/test/asynchronous/test_client_bulk_write.py b/test/asynchronous/test_client_bulk_write.py index 9464337809..5f6b3353e8 100644 --- a/test/asynchronous/test_client_bulk_write.py +++ b/test/asynchronous/test_client_bulk_write.py @@ -401,12 +401,16 @@ async def test_returns_error_if_unacknowledged_too_large_insert(self): # Insert document. models_insert = [InsertOne(namespace="db.coll", document={"a": b_repeated})] with self.assertRaises(DocumentTooLarge): - await client.bulk_write(models=models_insert, write_concern=WriteConcern(w=0)) + await client.bulk_write( + models=models_insert, ordered=False, write_concern=WriteConcern(w=0) + ) # Replace document. 
models_replace = [ReplaceOne(namespace="db.coll", filter={}, replacement={"a": b_repeated})] with self.assertRaises(DocumentTooLarge): - await client.bulk_write(models=models_replace, write_concern=WriteConcern(w=0)) + await client.bulk_write( + models=models_replace, ordered=False, write_concern=WriteConcern(w=0) + ) async def _setup_namespace_test_models(self): # See prose test specification below for details on these calculations. @@ -590,6 +594,44 @@ async def test_upserted_result(self): self.assertEqual(result.update_results[1].did_upsert, True) self.assertEqual(result.update_results[2].did_upsert, False) + @async_client_context.require_version_min(8, 0, 0, -24) + @async_client_context.require_no_serverless + async def test_15_unacknowledged_write_across_batches(self): + listener = OvertCommandListener() + client = await self.async_rs_or_single_client(event_listeners=[listener]) + + collection = client.db["coll"] + self.addAsyncCleanup(collection.drop) + await collection.drop() + await client.db.command({"create": "db.coll"}) + + b_repeated = "b" * (self.max_bson_object_size - 500) + models = [ + InsertOne(namespace="db.coll", document={"a": b_repeated}) + for _ in range(int(self.max_message_size_bytes / self.max_bson_object_size) + 1) + ] + + listener.reset() + + res = await client.bulk_write(models, ordered=False, write_concern=WriteConcern(w=0)) + self.assertEqual(False, res.acknowledged) + + events = listener.started_events + self.assertEqual(2, len(events)) + self.assertEqual( + int(self.max_message_size_bytes / self.max_bson_object_size), + len(events[0].command["ops"]), + ) + self.assertEqual(1, len(events[1].command["ops"])) + self.assertEqual(events[0].operation_id, events[1].operation_id) + self.assertEqual({"w": 0}, events[0].command["writeConcern"]) + self.assertEqual({"w": 0}, events[1].command["writeConcern"]) + + self.assertEqual( + int(self.max_message_size_bytes / self.max_bson_object_size) + 1, + await collection.count_documents({}), + ) + # https://github.com/mongodb/specifications/blob/master/source/client-side-operations-timeout/tests/README.md#11-multi-batch-bulkwrites class TestClientBulkWriteCSOT(AsyncIntegrationTest): diff --git a/test/command_monitoring/unacknowledged-client-bulkWrite.json b/test/command_monitoring/unacknowledged-client-bulkWrite.json index b30e1540f4..61bb00726c 100644 --- a/test/command_monitoring/unacknowledged-client-bulkWrite.json +++ b/test/command_monitoring/unacknowledged-client-bulkWrite.json @@ -91,7 +91,8 @@ } } } - ] + ], + "ordered": false }, "expectResult": { "insertedCount": { @@ -158,7 +159,7 @@ "command": { "bulkWrite": 1, "errorsOnly": true, - "ordered": true, + "ordered": false, "ops": [ { "insert": 0, diff --git a/test/crud/unified/client-bulkWrite-errors.json b/test/crud/unified/client-bulkWrite-errors.json index 8cc45bb5f2..015bd95c99 100644 --- a/test/crud/unified/client-bulkWrite-errors.json +++ b/test/crud/unified/client-bulkWrite-errors.json @@ -450,6 +450,64 @@ } } ] + }, + { + "description": "Requesting unacknowledged write with verboseResults is a client-side error", + "operations": [ + { + "name": "clientBulkWrite", + "object": "client0", + "arguments": { + "models": [ + { + "insertOne": { + "namespace": "crud-tests.coll0", + "document": { + "_id": 10 + } + } + } + ], + "verboseResults": true, + "ordered": false, + "writeConcern": { + "w": 0 + } + }, + "expectError": { + "isClientError": true, + "errorContains": "Cannot request unacknowledged write concern and verbose results" + } + } + ] + }, + { + 
"description": "Requesting unacknowledged write with ordered is a client-side error", + "operations": [ + { + "name": "clientBulkWrite", + "object": "client0", + "arguments": { + "models": [ + { + "insertOne": { + "namespace": "crud-tests.coll0", + "document": { + "_id": 10 + } + } + } + ], + "writeConcern": { + "w": 0 + } + }, + "expectError": { + "isClientError": true, + "errorContains": "Cannot request unacknowledged write concern and ordered writes" + } + } + ] } ] } diff --git a/test/test_client_bulk_write.py b/test/test_client_bulk_write.py index 58b5015dd2..733970dd57 100644 --- a/test/test_client_bulk_write.py +++ b/test/test_client_bulk_write.py @@ -401,12 +401,12 @@ def test_returns_error_if_unacknowledged_too_large_insert(self): # Insert document. models_insert = [InsertOne(namespace="db.coll", document={"a": b_repeated})] with self.assertRaises(DocumentTooLarge): - client.bulk_write(models=models_insert, write_concern=WriteConcern(w=0)) + client.bulk_write(models=models_insert, ordered=False, write_concern=WriteConcern(w=0)) # Replace document. models_replace = [ReplaceOne(namespace="db.coll", filter={}, replacement={"a": b_repeated})] with self.assertRaises(DocumentTooLarge): - client.bulk_write(models=models_replace, write_concern=WriteConcern(w=0)) + client.bulk_write(models=models_replace, ordered=False, write_concern=WriteConcern(w=0)) def _setup_namespace_test_models(self): # See prose test specification below for details on these calculations. @@ -590,6 +590,44 @@ def test_upserted_result(self): self.assertEqual(result.update_results[1].did_upsert, True) self.assertEqual(result.update_results[2].did_upsert, False) + @client_context.require_version_min(8, 0, 0, -24) + @client_context.require_no_serverless + def test_15_unacknowledged_write_across_batches(self): + listener = OvertCommandListener() + client = self.rs_or_single_client(event_listeners=[listener]) + + collection = client.db["coll"] + self.addCleanup(collection.drop) + collection.drop() + client.db.command({"create": "db.coll"}) + + b_repeated = "b" * (self.max_bson_object_size - 500) + models = [ + InsertOne(namespace="db.coll", document={"a": b_repeated}) + for _ in range(int(self.max_message_size_bytes / self.max_bson_object_size) + 1) + ] + + listener.reset() + + res = client.bulk_write(models, ordered=False, write_concern=WriteConcern(w=0)) + self.assertEqual(False, res.acknowledged) + + events = listener.started_events + self.assertEqual(2, len(events)) + self.assertEqual( + int(self.max_message_size_bytes / self.max_bson_object_size), + len(events[0].command["ops"]), + ) + self.assertEqual(1, len(events[1].command["ops"])) + self.assertEqual(events[0].operation_id, events[1].operation_id) + self.assertEqual({"w": 0}, events[0].command["writeConcern"]) + self.assertEqual({"w": 0}, events[1].command["writeConcern"]) + + self.assertEqual( + int(self.max_message_size_bytes / self.max_bson_object_size) + 1, + collection.count_documents({}), + ) + # https://github.com/mongodb/specifications/blob/master/source/client-side-operations-timeout/tests/README.md#11-multi-batch-bulkwrites class TestClientBulkWriteCSOT(IntegrationTest): From 9a11b78fdfe25de9e845e29503cc296b7f82dffa Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Wed, 30 Oct 2024 12:49:20 -0500 Subject: [PATCH 079/182] PYTHON-4209 Fix test for ensure that no error is raised for unknown auth mechanism (#1982) --- test/mockupdb/test_handshake.py | 39 ++++----------------------------- 1 file changed, 4 insertions(+), 35 deletions(-) diff --git 
a/test/mockupdb/test_handshake.py b/test/mockupdb/test_handshake.py index 752c4f8421..c2c978c4ad 100644 --- a/test/mockupdb/test_handshake.py +++ b/test/mockupdb/test_handshake.py @@ -218,50 +218,19 @@ def test_client_handshake_saslSupportedMechs(self): request.ok( "ismaster", True, - saslSupportedMechs=["SCRAM-SHA-256"], + # Unsupported auth mech should be ignored. + saslSupportedMechs=["SCRAM-SHA-256", "does_not_exist"], speculativeAuthenticate=auth, minWireVersion=2, maxWireVersion=MIN_SUPPORTED_WIRE_VERSION, ) # Authentication should immediately fail with: # OperationFailure: Server returned an invalid nonce. - with self.assertRaises(OperationFailure): + with self.assertRaises(OperationFailure) as cm: future() + self.assertEqual(str(cm.exception), "Server returned an invalid nonce.") return - def test_client_handshake_saslSupportedMechs_unknown(self): - server = MockupDB() - server.run() - self.addCleanup(server.stop) - - primary_response = OpReply( - "ismaster", - True, - minWireVersion=2, - maxWireVersion=MIN_SUPPORTED_WIRE_VERSION, - saslSupportedMechs=["SCRAM-SHA-256", "does_not_exist"], - ) - client = MongoClient( - server.uri, authmechanism="PLAIN", username="username", password="password" - ) - - self.addCleanup(client.close) - - # New monitoring connections send data during handshake. - heartbeat = server.receives("ismaster") - heartbeat.ok(primary_response) - - future = go(client.db.command, "whatever") - for request in server: - if request.matches("ismaster"): - request.ok(primary_response) - elif request.matches("saslStart"): - request.ok("saslStart", True, conversationId=1, payload=b"", done=True, ok=1) - else: - request.ok() - future() - return - def test_handshake_load_balanced(self): self.hello_with_option_helper(OpMsg, loadBalanced=True) with self.assertRaisesRegex(AssertionError, "does not match"): From ad3292e39b0490db354735a803911bfd6943ee65 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Wed, 30 Oct 2024 12:57:31 -0500 Subject: [PATCH 080/182] PYTHON-4922 Remove Support for MONGODB-CR Authentication (#1978) --- doc/changelog.rst | 1 + doc/examples/authentication.rst | 21 +--------- pymongo/asynchronous/auth.py | 16 -------- pymongo/auth_shared.py | 1 - pymongo/synchronous/auth.py | 16 -------- test/auth/legacy/connection-string.json | 41 ------------------- test/connection_string/test/valid-auth.json | 27 ++---------- .../connection_string/test/valid-options.json | 4 +- test/test_uri_parser.py | 13 +++--- 9 files changed, 13 insertions(+), 127 deletions(-) diff --git a/doc/changelog.rst b/doc/changelog.rst index 29fddb7b5c..94d991868d 100644 --- a/doc/changelog.rst +++ b/doc/changelog.rst @@ -12,6 +12,7 @@ PyMongo 4.11 brings a number of changes including: - Dropped support for Python 3.8. - Dropped support for MongoDB 3.6. +- Dropped support for the MONGODB-CR authenticate mechanism, which is no longer supported by MongoDB 4.0+. - Added support for free-threaded Python with the GIL disabled. For more information see: `Free-threaded CPython `_. - :attr:`~pymongo.asynchronous.mongo_client.AsyncMongoClient.address` and diff --git a/doc/examples/authentication.rst b/doc/examples/authentication.rst index 6c89910f3c..b319df814c 100644 --- a/doc/examples/authentication.rst +++ b/doc/examples/authentication.rst @@ -76,24 +76,6 @@ For best performance on Python versions older than 2.7.8 install `backports.pbkd .. _backports.pbkdf2: https://pypi.python.org/pypi/backports.pbkdf2/ -MONGODB-CR ----------- - -.. 
warning:: MONGODB-CR was deprecated with the release of MongoDB 3.6 and - is no longer supported by MongoDB 4.0. - -Before MongoDB 3.0 the default authentication mechanism was MONGODB-CR, -the "MongoDB Challenge-Response" protocol:: - - >>> from pymongo import MongoClient - >>> client = MongoClient('example.com', - ... username='user', - ... password='password', - ... authMechanism='MONGODB-CR') - >>> - >>> uri = "mongodb://user:password@example.com/?authSource=the_database&authMechanism=MONGODB-CR" - >>> client = MongoClient(uri) - Default Authentication Mechanism -------------------------------- @@ -221,8 +203,7 @@ SASL PLAIN (RFC 4616) MongoDB Enterprise Edition version 2.6 and newer support the SASL PLAIN authentication mechanism, initially intended for delegating authentication -to an LDAP server. Using the PLAIN mechanism is very similar to MONGODB-CR. -These examples use the $external virtual database for LDAP support:: +to an LDAP server. These examples use the $external virtual database for LDAP support:: >>> from pymongo import MongoClient >>> uri = "mongodb://user:password@example.com/?authMechanism=PLAIN" diff --git a/pymongo/asynchronous/auth.py b/pymongo/asynchronous/auth.py index 1fb28f6c49..fc563ec48f 100644 --- a/pymongo/asynchronous/auth.py +++ b/pymongo/asynchronous/auth.py @@ -329,21 +329,6 @@ async def _authenticate_x509(credentials: MongoCredential, conn: AsyncConnection await conn.command("$external", cmd) -async def _authenticate_mongo_cr(credentials: MongoCredential, conn: AsyncConnection) -> None: - """Authenticate using MONGODB-CR.""" - source = credentials.source - username = credentials.username - password = credentials.password - # Get a nonce - response = await conn.command(source, {"getnonce": 1}) - nonce = response["nonce"] - key = _auth_key(nonce, username, password) - - # Actually authenticate - query = {"authenticate": 1, "user": username, "nonce": nonce, "key": key} - await conn.command(source, query) - - async def _authenticate_default(credentials: MongoCredential, conn: AsyncConnection) -> None: if conn.max_wire_version >= 7: if conn.negotiated_mechs: @@ -365,7 +350,6 @@ async def _authenticate_default(credentials: MongoCredential, conn: AsyncConnect _AUTH_MAP: Mapping[str, Callable[..., Coroutine[Any, Any, None]]] = { "GSSAPI": _authenticate_gssapi, - "MONGODB-CR": _authenticate_mongo_cr, "MONGODB-X509": _authenticate_x509, "MONGODB-AWS": _authenticate_aws, "MONGODB-OIDC": _authenticate_oidc, # type:ignore[dict-item] diff --git a/pymongo/auth_shared.py b/pymongo/auth_shared.py index 7e3acd9dfb..11d08ffe9c 100644 --- a/pymongo/auth_shared.py +++ b/pymongo/auth_shared.py @@ -34,7 +34,6 @@ MECHANISMS = frozenset( [ "GSSAPI", - "MONGODB-CR", "MONGODB-OIDC", "MONGODB-X509", "MONGODB-AWS", diff --git a/pymongo/synchronous/auth.py b/pymongo/synchronous/auth.py index 9a3477679d..7b370843c5 100644 --- a/pymongo/synchronous/auth.py +++ b/pymongo/synchronous/auth.py @@ -326,21 +326,6 @@ def _authenticate_x509(credentials: MongoCredential, conn: Connection) -> None: conn.command("$external", cmd) -def _authenticate_mongo_cr(credentials: MongoCredential, conn: Connection) -> None: - """Authenticate using MONGODB-CR.""" - source = credentials.source - username = credentials.username - password = credentials.password - # Get a nonce - response = conn.command(source, {"getnonce": 1}) - nonce = response["nonce"] - key = _auth_key(nonce, username, password) - - # Actually authenticate - query = {"authenticate": 1, "user": username, "nonce": nonce, "key": key} - 
conn.command(source, query) - - def _authenticate_default(credentials: MongoCredential, conn: Connection) -> None: if conn.max_wire_version >= 7: if conn.negotiated_mechs: @@ -360,7 +345,6 @@ def _authenticate_default(credentials: MongoCredential, conn: Connection) -> Non _AUTH_MAP: Mapping[str, Callable[..., None]] = { "GSSAPI": _authenticate_gssapi, - "MONGODB-CR": _authenticate_mongo_cr, "MONGODB-X509": _authenticate_x509, "MONGODB-AWS": _authenticate_aws, "MONGODB-OIDC": _authenticate_oidc, # type:ignore[dict-item] diff --git a/test/auth/legacy/connection-string.json b/test/auth/legacy/connection-string.json index 57fd9d4a11..ab559582ae 100644 --- a/test/auth/legacy/connection-string.json +++ b/test/auth/legacy/connection-string.json @@ -127,47 +127,6 @@ "uri": "mongodb://localhost/?authMechanism=GSSAPI", "valid": false }, - { - "description": "should recognize the mechanism (MONGODB-CR)", - "uri": "mongodb://user:password@localhost/?authMechanism=MONGODB-CR", - "valid": true, - "credential": { - "username": "user", - "password": "password", - "source": "admin", - "mechanism": "MONGODB-CR", - "mechanism_properties": null - } - }, - { - "description": "should use the database when no authSource is specified (MONGODB-CR)", - "uri": "mongodb://user:password@localhost/foo?authMechanism=MONGODB-CR", - "valid": true, - "credential": { - "username": "user", - "password": "password", - "source": "foo", - "mechanism": "MONGODB-CR", - "mechanism_properties": null - } - }, - { - "description": "should use the authSource when specified (MONGODB-CR)", - "uri": "mongodb://user:password@localhost/foo?authMechanism=MONGODB-CR&authSource=bar", - "valid": true, - "credential": { - "username": "user", - "password": "password", - "source": "bar", - "mechanism": "MONGODB-CR", - "mechanism_properties": null - } - }, - { - "description": "should throw an exception if no username is supplied (MONGODB-CR)", - "uri": "mongodb://localhost/?authMechanism=MONGODB-CR", - "valid": false - }, { "description": "should recognize the mechanism (MONGODB-X509)", "uri": "mongodb://CN%3DmyName%2COU%3DmyOrgUnit%2CO%3DmyOrg%2CL%3DmyLocality%2CST%3DmyState%2CC%3DmyCountry@localhost/?authMechanism=MONGODB-X509", diff --git a/test/connection_string/test/valid-auth.json b/test/connection_string/test/valid-auth.json index 4f684ff185..12192fab4c 100644 --- a/test/connection_string/test/valid-auth.json +++ b/test/connection_string/test/valid-auth.json @@ -220,29 +220,8 @@ "options": null }, { - "description": "Escaped user info and database (MONGODB-CR)", - "uri": "mongodb://%24am:f%3Azzb%40z%2Fz%3D@127.0.0.1/admin%3F?authMechanism=MONGODB-CR", - "valid": true, - "warning": false, - "hosts": [ - { - "type": "ipv4", - "host": "127.0.0.1", - "port": null - } - ], - "auth": { - "username": "$am", - "password": "f:zzb@z/z=", - "db": "admin?" 
- }, - "options": { - "authmechanism": "MONGODB-CR" - } - }, - { - "description": "Subdelimiters in user/pass don't need escaping (MONGODB-CR)", - "uri": "mongodb://!$&'()*+,;=:!$&'()*+,;=@127.0.0.1/admin?authMechanism=MONGODB-CR", + "description": "Subdelimiters in user/pass don't need escaping (PLAIN)", + "uri": "mongodb://!$&'()*+,;=:!$&'()*+,;=@127.0.0.1/admin?authMechanism=PLAIN", "valid": true, "warning": false, "hosts": [ @@ -258,7 +237,7 @@ "db": "admin" }, "options": { - "authmechanism": "MONGODB-CR" + "authmechanism": "PLAIN" } }, { diff --git a/test/connection_string/test/valid-options.json b/test/connection_string/test/valid-options.json index 3c79fe7ae5..6c86172d08 100644 --- a/test/connection_string/test/valid-options.json +++ b/test/connection_string/test/valid-options.json @@ -2,7 +2,7 @@ "tests": [ { "description": "Option names are normalized to lowercase", - "uri": "mongodb://alice:secret@example.com/admin?AUTHMechanism=MONGODB-CR", + "uri": "mongodb://alice:secret@example.com/admin?AUTHMechanism=PLAIN", "valid": true, "warning": false, "hosts": [ @@ -18,7 +18,7 @@ "db": "admin" }, "options": { - "authmechanism": "MONGODB-CR" + "authmechanism": "PLAIN" } }, { diff --git a/test/test_uri_parser.py b/test/test_uri_parser.py index 2a68e9a2cd..f95717e95f 100644 --- a/test/test_uri_parser.py +++ b/test/test_uri_parser.py @@ -142,7 +142,6 @@ def test_split_options(self): self.assertEqual({"fsync": True}, split_options("fsync=true")) self.assertEqual({"fsync": False}, split_options("fsync=false")) self.assertEqual({"authmechanism": "GSSAPI"}, split_options("authMechanism=GSSAPI")) - self.assertEqual({"authmechanism": "MONGODB-CR"}, split_options("authMechanism=MONGODB-CR")) self.assertEqual( {"authmechanism": "SCRAM-SHA-1"}, split_options("authMechanism=SCRAM-SHA-1") ) @@ -295,30 +294,30 @@ def test_parse_uri(self): # Various authentication tests res = copy.deepcopy(orig) - res["options"] = {"authmechanism": "MONGODB-CR"} + res["options"] = {"authmechanism": "SCRAM-SHA-256"} res["username"] = "user" res["password"] = "password" self.assertEqual( - res, parse_uri("mongodb://user:password@localhost/?authMechanism=MONGODB-CR") + res, parse_uri("mongodb://user:password@localhost/?authMechanism=SCRAM-SHA-256") ) res = copy.deepcopy(orig) - res["options"] = {"authmechanism": "MONGODB-CR", "authsource": "bar"} + res["options"] = {"authmechanism": "SCRAM-SHA-256", "authsource": "bar"} res["username"] = "user" res["password"] = "password" res["database"] = "foo" self.assertEqual( res, parse_uri( - "mongodb://user:password@localhost/foo?authSource=bar;authMechanism=MONGODB-CR" + "mongodb://user:password@localhost/foo?authSource=bar;authMechanism=SCRAM-SHA-256" ), ) res = copy.deepcopy(orig) - res["options"] = {"authmechanism": "MONGODB-CR"} + res["options"] = {"authmechanism": "SCRAM-SHA-256"} res["username"] = "user" res["password"] = "" - self.assertEqual(res, parse_uri("mongodb://user:@localhost/?authMechanism=MONGODB-CR")) + self.assertEqual(res, parse_uri("mongodb://user:@localhost/?authMechanism=SCRAM-SHA-256")) res = copy.deepcopy(orig) res["username"] = "user@domain.com" From 92d6a732c5492adcb360b624af2b607b47ed31ea Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Wed, 30 Oct 2024 14:06:54 -0500 Subject: [PATCH 081/182] PYTHON-3906 & PYTHON-2867 Implement GSSAPI ServiceHost support and expand canonicalization options (#1983) --- pymongo/auth_shared.py | 17 ++++++- pymongo/common.py | 12 ++++- test/auth/legacy/connection-string.json | 56 ++++++++++++++++++--- 
test/connection_string/test/valid-auth.json | 5 +- 4 files changed, 78 insertions(+), 12 deletions(-) diff --git a/pymongo/auth_shared.py b/pymongo/auth_shared.py index 11d08ffe9c..fa25aa3faa 100644 --- a/pymongo/auth_shared.py +++ b/pymongo/auth_shared.py @@ -77,7 +77,7 @@ def __hash__(self) -> int: GSSAPIProperties = namedtuple( - "GSSAPIProperties", ["service_name", "canonicalize_host_name", "service_realm"] + "GSSAPIProperties", ["service_name", "canonicalize_host_name", "service_realm", "service_host"] ) """Mechanism properties for GSSAPI authentication.""" @@ -86,6 +86,16 @@ def __hash__(self) -> int: """Mechanism properties for MONGODB-AWS authentication.""" +def _validate_canonicalize_host_name(value: str | bool) -> str | bool: + valid_names = [False, True, "none", "forward", "forwardAndReverse"] + if value in ["true", "false", True, False]: + return value in ["true", True] + + if value not in valid_names: + raise ValueError(f"CANONICALIZE_HOST_NAME '{value}' not in valid options: {valid_names}") + return value + + def _build_credentials_tuple( mech: str, source: Optional[str], @@ -102,12 +112,15 @@ def _build_credentials_tuple( raise ValueError("authentication source must be $external or None for GSSAPI") properties = extra.get("authmechanismproperties", {}) service_name = properties.get("SERVICE_NAME", "mongodb") - canonicalize = bool(properties.get("CANONICALIZE_HOST_NAME", False)) + service_host = properties.get("SERVICE_HOST", None) + canonicalize = properties.get("CANONICALIZE_HOST_NAME", "false") + canonicalize = _validate_canonicalize_host_name(canonicalize) service_realm = properties.get("SERVICE_REALM") props = GSSAPIProperties( service_name=service_name, canonicalize_host_name=canonicalize, service_realm=service_realm, + service_host=service_host, ) # Source is always $external. return MongoCredential(mech, "$external", user, passwd, props, None) diff --git a/pymongo/common.py b/pymongo/common.py index 87aa936f5d..d4601a0eb5 100644 --- a/pymongo/common.py +++ b/pymongo/common.py @@ -139,6 +139,9 @@ # Default value for serverMonitoringMode SERVER_MONITORING_MODE = "auto" # poll/stream/auto +# Auth mechanism properties that must raise an error instead of warning if they invalidate. 
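As a rough illustration of the expanded options (values borrowed from the spec tests in this patch; the printed shape is an assumption, not taken from the diff), the new properties surface through URI parsing like this:

    from pymongo.uri_parser import parse_uri

    parsed = parse_uri(
        "mongodb://user%40EXAMPLE.COM@localhost/?authMechanism=GSSAPI"
        "&authMechanismProperties=SERVICE_NAME:other,"
        "CANONICALIZE_HOST_NAME:forward,SERVICE_HOST:example.com"
    )
    # Expected to contain SERVICE_NAME 'other', CANONICALIZE_HOST_NAME 'forward',
    # and SERVICE_HOST 'example.com' once this patch is applied.
    print(parsed["options"]["authmechanismproperties"])

    # An unrecognized value now raises instead of merely warning:
    # CANONICALIZE_HOST_NAME:invalid -> ValueError listing the valid options.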
+_MECH_PROP_MUST_RAISE = ["CANONICALIZE_HOST_NAME"] + def partition_node(node: str) -> tuple[str, int]: """Split a host:port string into (host, int(port)) pair.""" @@ -423,6 +426,7 @@ def validate_read_preference_tags(name: str, value: Any) -> list[dict[str, str]] _MECHANISM_PROPS = frozenset( [ "SERVICE_NAME", + "SERVICE_HOST", "CANONICALIZE_HOST_NAME", "SERVICE_REALM", "AWS_SESSION_TOKEN", @@ -476,7 +480,9 @@ def validate_auth_mechanism_properties(option: str, value: Any) -> dict[str, Uni ) if key == "CANONICALIZE_HOST_NAME": - props[key] = validate_boolean_or_string(key, val) + from pymongo.auth_shared import _validate_canonicalize_host_name + + props[key] = _validate_canonicalize_host_name(val) else: props[key] = val @@ -867,6 +873,10 @@ def get_setter_key(x: str) -> str: validator = _get_validator(opt, URI_OPTIONS_VALIDATOR_MAP, normed_key=normed_key) validated = validator(opt, value) except (ValueError, TypeError, ConfigurationError) as exc: + if normed_key == "authmechanismproperties" and any( + p in str(exc) for p in _MECH_PROP_MUST_RAISE + ): + raise if warn: warnings.warn(str(exc), stacklevel=2) else: diff --git a/test/auth/legacy/connection-string.json b/test/auth/legacy/connection-string.json index ab559582ae..67aafbff6e 100644 --- a/test/auth/legacy/connection-string.json +++ b/test/auth/legacy/connection-string.json @@ -80,7 +80,7 @@ }, { "description": "should accept generic mechanism property (GSSAPI)", - "uri": "mongodb://user%40DOMAIN.COM@localhost/?authMechanism=GSSAPI&authMechanismProperties=SERVICE_NAME:other,CANONICALIZE_HOST_NAME:true", + "uri": "mongodb://user%40DOMAIN.COM@localhost/?authMechanism=GSSAPI&authMechanismProperties=SERVICE_NAME:other,CANONICALIZE_HOST_NAME:forward,SERVICE_HOST:example.com", "valid": true, "credential": { "username": "user@DOMAIN.COM", @@ -89,10 +89,46 @@ "mechanism": "GSSAPI", "mechanism_properties": { "SERVICE_NAME": "other", - "CANONICALIZE_HOST_NAME": true + "SERVICE_HOST": "example.com", + "CANONICALIZE_HOST_NAME": "forward" } } }, + { + "description": "should accept forwardAndReverse hostname canonicalization (GSSAPI)", + "uri": "mongodb://user%40DOMAIN.COM@localhost/?authMechanism=GSSAPI&authMechanismProperties=SERVICE_NAME:other,CANONICALIZE_HOST_NAME:forwardAndReverse", + "valid": true, + "credential": { + "username": "user@DOMAIN.COM", + "password": null, + "source": "$external", + "mechanism": "GSSAPI", + "mechanism_properties": { + "SERVICE_NAME": "other", + "CANONICALIZE_HOST_NAME": "forwardAndReverse" + } + } + }, + { + "description": "should accept no hostname canonicalization (GSSAPI)", + "uri": "mongodb://user%40DOMAIN.COM@localhost/?authMechanism=GSSAPI&authMechanismProperties=SERVICE_NAME:other,CANONICALIZE_HOST_NAME:none", + "valid": true, + "credential": { + "username": "user@DOMAIN.COM", + "password": null, + "source": "$external", + "mechanism": "GSSAPI", + "mechanism_properties": { + "SERVICE_NAME": "other", + "CANONICALIZE_HOST_NAME": "none" + } + } + }, + { + "description": "must raise an error when the hostname canonicalization is invalid", + "uri": "mongodb://user%40DOMAIN.COM@localhost/?authMechanism=GSSAPI&authMechanismProperties=SERVICE_NAME:other,CANONICALIZE_HOST_NAME:invalid", + "valid": false + }, { "description": "should accept the password (GSSAPI)", "uri": "mongodb://user%40DOMAIN.COM:password@localhost/?authMechanism=GSSAPI&authSource=$external", @@ -433,14 +469,14 @@ } }, { - "description": "should throw an exception if username and password is specified for test environment (MONGODB-OIDC)", + 
"description": "should throw an exception if supplied a password (MONGODB-OIDC)", "uri": "mongodb://user:pass@localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:test", "valid": false, "credential": null }, { - "description": "should throw an exception if username is specified for test environment (MONGODB-OIDC)", - "uri": "mongodb://principalName@localhost/?authMechanism=MONGODB-OIDC&ENVIRONMENT:test", + "description": "should throw an exception if username is specified for test (MONGODB-OIDC)", + "uri": "mongodb://principalName@localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:test", "valid": false, "credential": null }, @@ -451,11 +487,17 @@ "credential": null }, { - "description": "should throw an exception if neither provider nor callbacks specified (MONGODB-OIDC)", + "description": "should throw an exception if neither environment nor callbacks specified (MONGODB-OIDC)", "uri": "mongodb://localhost/?authMechanism=MONGODB-OIDC", "valid": false, "credential": null }, + { + "description": "should throw an exception when unsupported auth property is specified (MONGODB-OIDC)", + "uri": "mongodb://localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=UnsupportedProperty:unexisted", + "valid": false, + "credential": null + }, { "description": "should recognise the mechanism with azure provider (MONGODB-OIDC)", "uri": "mongodb://localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:azure,TOKEN_RESOURCE:foo", @@ -586,4 +628,4 @@ "credential": null } ] -} \ No newline at end of file +} diff --git a/test/connection_string/test/valid-auth.json b/test/connection_string/test/valid-auth.json index 12192fab4c..60f63f4e3f 100644 --- a/test/connection_string/test/valid-auth.json +++ b/test/connection_string/test/valid-auth.json @@ -263,7 +263,7 @@ }, { "description": "Escaped username (GSSAPI)", - "uri": "mongodb://user%40EXAMPLE.COM:secret@localhost/?authMechanismProperties=SERVICE_NAME:other,CANONICALIZE_HOST_NAME:true&authMechanism=GSSAPI", + "uri": "mongodb://user%40EXAMPLE.COM:secret@localhost/?authMechanismProperties=SERVICE_NAME:other,CANONICALIZE_HOST_NAME:forward,SERVICE_HOST:example.com&authMechanism=GSSAPI", "valid": true, "warning": false, "hosts": [ @@ -282,7 +282,8 @@ "authmechanism": "GSSAPI", "authmechanismproperties": { "SERVICE_NAME": "other", - "CANONICALIZE_HOST_NAME": true + "SERVICE_HOST": "example.com", + "CANONICALIZE_HOST_NAME": "forward" } } }, From 2332d69328c28e4a972b633b88703a819ef17ba1 Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Wed, 30 Oct 2024 15:37:00 -0400 Subject: [PATCH 082/182] PYTHON-4807 - Update changelog + remove dead code (#1984) --- doc/changelog.rst | 4 +++ pymongo/asynchronous/client_bulk.py | 49 +++-------------------------- pymongo/synchronous/client_bulk.py | 49 +++-------------------------- 3 files changed, 14 insertions(+), 88 deletions(-) diff --git a/doc/changelog.rst b/doc/changelog.rst index 94d991868d..22b0c744a9 100644 --- a/doc/changelog.rst +++ b/doc/changelog.rst @@ -24,6 +24,10 @@ PyMongo 4.11 brings a number of changes including: :meth:`~pymongo.collection.Collection.update_one`, :meth:`~pymongo.collection.Collection.replace_one`, :class:`~pymongo.operations.UpdateOne`, and :class:`~pymongo.operations.UpdateMany`, +- :meth:`~pymongo.mongo_client.MongoClient.bulk_write` and + :meth:`~pymongo.asynchronous.mongo_client.AsyncMongoClient.bulk_write` now throw an error + when ``ordered=True`` or ``verboseResults=True`` are used with unacknowledged writes. 
+ These are unavoidable breaking changes. Issues Resolved ............... diff --git a/pymongo/asynchronous/client_bulk.py b/pymongo/asynchronous/client_bulk.py index 96571c21eb..a6f7178e47 100644 --- a/pymongo/asynchronous/client_bulk.py +++ b/pymongo/asynchronous/client_bulk.py @@ -681,11 +681,11 @@ async def retryable_bulk( _throw_client_bulk_write_exception(full_result, self.verbose_results) return full_result - async def execute_command_unack_unordered( + async def execute_command_unack( self, conn: AsyncConnection, ) -> None: - """Execute commands with OP_MSG and w=0 writeConcern, unordered.""" + """Execute commands with OP_MSG and w=0 writeConcern. Always unordered.""" db_name = "admin" cmd_name = "bulkWrite" listeners = self.client._event_listeners @@ -704,8 +704,8 @@ async def execute_command_unack_unordered( while self.idx_offset < self.total_ops: # Construct the server command, specifying the relevant options. cmd = {"bulkWrite": 1} - cmd["errorsOnly"] = not self.verbose_results - cmd["ordered"] = self.ordered # type: ignore[assignment] + cmd["errorsOnly"] = True + cmd["ordered"] = False if self.bypass_doc_val is not None: cmd["bypassDocumentValidation"] = self.bypass_doc_val cmd["writeConcern"] = {"w": 0} # type: ignore[assignment] @@ -723,43 +723,6 @@ async def execute_command_unack_unordered( self.idx_offset += len(to_send_ops) - async def execute_command_unack_ordered( - self, - conn: AsyncConnection, - ) -> None: - """Execute commands with OP_MSG and w=0 WriteConcern, ordered.""" - full_result: MutableMapping[str, Any] = { - "anySuccessful": False, - "error": None, - "writeErrors": [], - "writeConcernErrors": [], - "nInserted": 0, - "nUpserted": 0, - "nMatched": 0, - "nModified": 0, - "nDeleted": 0, - "insertResults": {}, - "updateResults": {}, - "deleteResults": {}, - } - # Ordered bulk writes have to be acknowledged so that we stop - # processing at the first error, even when the application - # specified unacknowledged writeConcern. - initial_write_concern = WriteConcern() - op_id = _randint() - try: - await self._execute_command( - initial_write_concern, - None, - conn, - op_id, - False, - full_result, - self.write_concern, - ) - except OperationFailure: - pass - async def execute_no_results( self, conn: AsyncConnection, @@ -775,9 +738,7 @@ async def execute_no_results( "Cannot set bypass_document_validation with unacknowledged write concern" ) - if self.ordered: - return await self.execute_command_unack_ordered(conn) - return await self.execute_command_unack_unordered(conn) + return await self.execute_command_unack(conn) async def execute( self, diff --git a/pymongo/synchronous/client_bulk.py b/pymongo/synchronous/client_bulk.py index 2c38b1d76c..6cb4275417 100644 --- a/pymongo/synchronous/client_bulk.py +++ b/pymongo/synchronous/client_bulk.py @@ -679,11 +679,11 @@ def retryable_bulk( _throw_client_bulk_write_exception(full_result, self.verbose_results) return full_result - def execute_command_unack_unordered( + def execute_command_unack( self, conn: Connection, ) -> None: - """Execute commands with OP_MSG and w=0 writeConcern, unordered.""" + """Execute commands with OP_MSG and w=0 writeConcern. Always unordered.""" db_name = "admin" cmd_name = "bulkWrite" listeners = self.client._event_listeners @@ -702,8 +702,8 @@ def execute_command_unack_unordered( while self.idx_offset < self.total_ops: # Construct the server command, specifying the relevant options. 
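A minimal sketch of the behavior documented above, assuming the 4.9 client-level bulk write API (``namespace`` on the write models) and a reachable server; the concrete exception type is not shown in this diff, so the broad ``except`` is an assumption:

    from pymongo import InsertOne, MongoClient

    client = MongoClient(w=0)  # unacknowledged writes
    models = [InsertOne({"x": 1}, namespace="db.coll")]

    try:
        # ordered defaults to True, which is now rejected together with w=0,
        # as is verbose_results=True.
        client.bulk_write(models, ordered=True)
    except Exception as exc:
        print(type(exc).__name__, exc)

    client.bulk_write(models, ordered=False)  # still allowed, always unordered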
cmd = {"bulkWrite": 1} - cmd["errorsOnly"] = not self.verbose_results - cmd["ordered"] = self.ordered # type: ignore[assignment] + cmd["errorsOnly"] = True + cmd["ordered"] = False if self.bypass_doc_val is not None: cmd["bypassDocumentValidation"] = self.bypass_doc_val cmd["writeConcern"] = {"w": 0} # type: ignore[assignment] @@ -721,43 +721,6 @@ def execute_command_unack_unordered( self.idx_offset += len(to_send_ops) - def execute_command_unack_ordered( - self, - conn: Connection, - ) -> None: - """Execute commands with OP_MSG and w=0 WriteConcern, ordered.""" - full_result: MutableMapping[str, Any] = { - "anySuccessful": False, - "error": None, - "writeErrors": [], - "writeConcernErrors": [], - "nInserted": 0, - "nUpserted": 0, - "nMatched": 0, - "nModified": 0, - "nDeleted": 0, - "insertResults": {}, - "updateResults": {}, - "deleteResults": {}, - } - # Ordered bulk writes have to be acknowledged so that we stop - # processing at the first error, even when the application - # specified unacknowledged writeConcern. - initial_write_concern = WriteConcern() - op_id = _randint() - try: - self._execute_command( - initial_write_concern, - None, - conn, - op_id, - False, - full_result, - self.write_concern, - ) - except OperationFailure: - pass - def execute_no_results( self, conn: Connection, @@ -773,9 +736,7 @@ def execute_no_results( "Cannot set bypass_document_validation with unacknowledged write concern" ) - if self.ordered: - return self.execute_command_unack_ordered(conn) - return self.execute_command_unack_unordered(conn) + return self.execute_command_unack(conn) def execute( self, From 351196b91b5df0383fd56be5dd2c8f139ccd3a14 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Wed, 30 Oct 2024 15:46:52 -0500 Subject: [PATCH 083/182] PYTHON-4813 Update specification documentation links (#1977) --- README.md | 2 +- bson/json_util.py | 2 +- doc/api/index.rst | 2 +- doc/changelog.rst | 8 ++++---- doc/conf.py | 2 +- doc/developer/periodic_executor.rst | 2 +- doc/examples/uuid.rst | 2 +- doc/migrate-to-pymongo4.rst | 4 ++-- pymongo/asynchronous/mongo_client.py | 6 +++--- pymongo/pool_options.py | 2 +- pymongo/synchronous/mongo_client.py | 6 +++--- test/asynchronous/test_auth.py | 2 +- test/asynchronous/test_encryption.py | 20 ++++++++++---------- test/asynchronous/unified_format.py | 4 ++-- test/test_auth.py | 2 +- test/test_dbref.py | 2 +- test/test_encryption.py | 20 ++++++++++---------- test/test_streaming_protocol.py | 2 +- test/unified_format.py | 4 ++-- test/unified_format_shared.py | 2 +- 20 files changed, 48 insertions(+), 48 deletions(-) diff --git a/README.md b/README.md index f5e2cdf46d..bd0755620e 100644 --- a/README.md +++ b/README.md @@ -11,7 +11,7 @@ The PyMongo distribution contains tools for interacting with MongoDB database from Python. The `bson` package is an implementation of the [BSON format](http://bsonspec.org) for Python. The `pymongo` package is a native Python driver for MongoDB. The `gridfs` package is a -[gridfs](https://github.com/mongodb/specifications/blob/master/source/gridfs/gridfs-spec.rst/) +[gridfs](https://github.com/mongodb/specifications/blob/master/source/gridfs/gridfs-spec.md/) implementation on top of `pymongo`. PyMongo supports MongoDB 4.0, 4.2, 4.4, 5.0, 6.0, 7.0, and 8.0. diff --git a/bson/json_util.py b/bson/json_util.py index 6f34e4103d..a171327ead 100644 --- a/bson/json_util.py +++ b/bson/json_util.py @@ -22,7 +22,7 @@ when :const:`CANONICAL_JSON_OPTIONS` or :const:`LEGACY_JSON_OPTIONS` is provided, respectively. -.. 
_Extended JSON: https://github.com/mongodb/specifications/blob/master/source/extended-json.rst +.. _Extended JSON: https://github.com/mongodb/specifications/blob/master/source/extended-json/extended-json.md Example usage (deserialization): diff --git a/doc/api/index.rst b/doc/api/index.rst index 30ae3608ca..437f2cc6a6 100644 --- a/doc/api/index.rst +++ b/doc/api/index.rst @@ -6,7 +6,7 @@ interacting with MongoDB. :mod:`bson` is an implementation of the `BSON format `_, :mod:`pymongo` is a full-featured driver for MongoDB, and :mod:`gridfs` is a set of tools for working with the `GridFS -`_ storage +`_ storage specification. .. toctree:: diff --git a/doc/changelog.rst b/doc/changelog.rst index 22b0c744a9..bd4eafe3ef 100644 --- a/doc/changelog.rst +++ b/doc/changelog.rst @@ -1027,7 +1027,7 @@ See the `PyMongo 4.0 release notes in JIRA`_ for the list of resolved issues in this release. .. _PyMongo 4.0 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=18463 -.. _DBRef specification: https://github.com/mongodb/specifications/blob/5a8c8d7/source/dbref.rst +.. _DBRef specification: https://github.com/mongodb/specifications/blob/master/source/dbref/dbref.md Changes in Version 3.13.0 (2022/11/01) -------------------------------------- @@ -1562,7 +1562,7 @@ Unavoidable breaking changes: bumped to 1.16.0. This is a breaking change for applications that use PyMongo's SRV support with a version of ``dnspython`` older than 1.16.0. -.. _URI options specification: https://github.com/mongodb/specifications/blob/master/source/uri-options/uri-options.rst +.. _URI options specification: https://github.com/mongodb/specifications/blob/master/source/uri-options/uri-options.md Issues Resolved @@ -1586,7 +1586,7 @@ Changes in Version 3.8.0 (2019/04/22) must upgrade to PyPy3.5+. - :class:`~bson.objectid.ObjectId` now implements the `ObjectID specification - version 0.2 `_. + version 0.2 `_. - For better performance and to better follow the GridFS spec, :class:`~gridfs.grid_file.GridOut` now uses a single cursor to read all the chunks in the file. Previously, each chunk in the file was queried @@ -1948,7 +1948,7 @@ Highlights include: :class:`~pymongo.operations.UpdateOne`, and :class:`~pymongo.operations.UpdateMany`. - Implemented the `MongoDB Extended JSON - `_ + `_ specification. - :class:`~bson.decimal128.Decimal128` now works when cdecimal is installed. - PyMongo is now tested against a wider array of operating systems and CPU diff --git a/doc/conf.py b/doc/conf.py index f0d9f921bb..f82c719361 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -85,7 +85,7 @@ # wiki.centos.org has been flakey. # sourceforge.net is giving a 403 error, but is still accessible from the browser. 
linkcheck_ignore = [ - "https://github.com/mongodb/specifications/blob/master/source/server-discovery-and-monitoring/server-monitoring.rst#requesting-an-immediate-check", + "https://github.com/mongodb/specifications/blob/master/source/server-discovery-and-monitoring/server-monitoring.md#requesting-an-immediate-check", "https://github.com/mongodb/libmongocrypt/blob/master/bindings/python/README.rst#installing-from-source", r"https://wiki.centos.org/[\w/]*", r"http://sourceforge.net/", diff --git a/doc/developer/periodic_executor.rst b/doc/developer/periodic_executor.rst index effe18efca..67eaa89f10 100644 --- a/doc/developer/periodic_executor.rst +++ b/doc/developer/periodic_executor.rst @@ -106,7 +106,7 @@ Thus the current design of periodic executors is surprisingly simple: they do a simple ``time.sleep`` for a half-second, check if it is time to wake or terminate, and sleep again. -.. _Server Discovery And Monitoring Spec: https://github.com/mongodb/specifications/blob/master/source/server-discovery-and-monitoring/server-monitoring.rst#requesting-an-immediate-check +.. _Server Discovery And Monitoring Spec: https://github.com/mongodb/specifications/blob/master/source/server-discovery-and-monitoring/server-monitoring.md#requesting-an-immediate-check .. _PYTHON-863: https://jira.mongodb.org/browse/PYTHON-863 diff --git a/doc/examples/uuid.rst b/doc/examples/uuid.rst index 90ec71ebe2..350db14d9a 100644 --- a/doc/examples/uuid.rst +++ b/doc/examples/uuid.rst @@ -84,7 +84,7 @@ Finally, the same UUID would historically be serialized by the Java driver as:: .. note:: For in-depth information about the the byte-order historically used by different drivers, see the `Handling of Native UUID Types Specification - `_. + `_. This difference in the byte-order of UUIDs encoded by different drivers can result in highly unintuitive behavior in some scenarios. We detail two such diff --git a/doc/migrate-to-pymongo4.rst b/doc/migrate-to-pymongo4.rst index bc6da85560..3e992a8249 100644 --- a/doc/migrate-to-pymongo4.rst +++ b/doc/migrate-to-pymongo4.rst @@ -118,7 +118,7 @@ Renamed URI options Several deprecated URI options have been renamed to the standardized option names defined in the -`URI options specification `_. +`URI options specification `_. The old option names and their renamed equivalents are summarized in the table below. Some renamed options have different semantics from the option being replaced as noted in the 'Migration Notes' column. @@ -965,7 +965,7 @@ correct type. Otherwise the document is returned as normal. Previously, any subdocument containing a ``$ref`` field would be decoded as a :class:`~bson.dbref.DBRef`. -.. _DBRef specification: https://github.com/mongodb/specifications/blob/5a8c8d7/source/dbref.rst +.. _DBRef specification: https://github.com/mongodb/specifications/blob/master/source/dbref/dbref.md Encoding a UUID raises an error by default .......................................... diff --git a/pymongo/asynchronous/mongo_client.py b/pymongo/asynchronous/mongo_client.py index a71e4cb5cd..e4fdf25c28 100644 --- a/pymongo/asynchronous/mongo_client.py +++ b/pymongo/asynchronous/mongo_client.py @@ -221,7 +221,7 @@ def __init__( `_. See the `Initial DNS Seedlist Discovery spec `_ + initial-dns-seedlist-discovery/initial-dns-seedlist-discovery.md>`_ for more details. Note that the use of SRV URIs implicitly enables TLS support. Pass tls=false in the URI to override. 
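A brief illustration of the retryWrites option described in this docstring; ``connect=False`` defers the actual connection, so the sketch only exercises option parsing:

    from pymongo import MongoClient

    # Supported write operations are retried once by default; opt out per client:
    client = MongoClient("mongodb://localhost:27017/?retryWrites=false", connect=False)
    # Equivalent keyword form:
    client = MongoClient("mongodb://localhost:27017", retryWrites=False, connect=False)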
@@ -367,7 +367,7 @@ def __init__( :meth:`~pymongo.asynchronous.collection.AsyncCollection.aggregate` using the ``$out`` pipeline operator and any operation with an unacknowledged write concern (e.g. {w: 0})). See - https://github.com/mongodb/specifications/blob/master/source/retryable-writes/retryable-writes.rst + https://github.com/mongodb/specifications/blob/master/source/retryable-writes/retryable-writes.md - `retryReads`: (boolean) Whether supported read operations executed within this AsyncMongoClient will be retried once after a network error. Defaults to ``True``. @@ -394,7 +394,7 @@ def __init__( transient errors such as network failures, database upgrades, and replica set failovers. For an exact definition of which errors trigger a retry, see the `retryable reads specification - `_. + `_. - `compressors`: Comma separated list of compressors for wire protocol compression. The list is used to negotiate a compressor diff --git a/pymongo/pool_options.py b/pymongo/pool_options.py index 61486c91c6..f3ed6cd2c1 100644 --- a/pymongo/pool_options.py +++ b/pymongo/pool_options.py @@ -216,7 +216,7 @@ def _metadata_env() -> dict[str, Any]: _MAX_METADATA_SIZE = 512 -# See: https://github.com/mongodb/specifications/blob/5112bcc/source/mongodb-handshake/handshake.rst#limitations +# See: https://github.com/mongodb/specifications/blob/master/source/mongodb-handshake/handshake.md#limitations def _truncate_metadata(metadata: MutableMapping[str, Any]) -> None: """Perform metadata truncation.""" if len(bson.encode(metadata)) <= _MAX_METADATA_SIZE: diff --git a/pymongo/synchronous/mongo_client.py b/pymongo/synchronous/mongo_client.py index 24696f0c8e..0380d4468b 100644 --- a/pymongo/synchronous/mongo_client.py +++ b/pymongo/synchronous/mongo_client.py @@ -216,7 +216,7 @@ def __init__( `_. See the `Initial DNS Seedlist Discovery spec `_ + initial-dns-seedlist-discovery/initial-dns-seedlist-discovery.md>`_ for more details. Note that the use of SRV URIs implicitly enables TLS support. Pass tls=false in the URI to override. @@ -365,7 +365,7 @@ def __init__( :meth:`~pymongo.collection.Collection.aggregate` using the ``$out`` pipeline operator and any operation with an unacknowledged write concern (e.g. {w: 0})). See - https://github.com/mongodb/specifications/blob/master/source/retryable-writes/retryable-writes.rst + https://github.com/mongodb/specifications/blob/master/source/retryable-writes/retryable-writes.md - `retryReads`: (boolean) Whether supported read operations executed within this MongoClient will be retried once after a network error. Defaults to ``True``. @@ -392,7 +392,7 @@ def __init__( transient errors such as network failures, database upgrades, and replica set failovers. For an exact definition of which errors trigger a retry, see the `retryable reads specification - `_. + `_. - `compressors`: Comma separated list of compressors for wire protocol compression. 
The list is used to negotiate a compressor diff --git a/test/asynchronous/test_auth.py b/test/asynchronous/test_auth.py index 9262714374..4f26200fb0 100644 --- a/test/asynchronous/test_auth.py +++ b/test/asynchronous/test_auth.py @@ -375,7 +375,7 @@ async def test_scram_sha1(self): await db.command("dbstats") -# https://github.com/mongodb/specifications/blob/master/source/auth/auth.rst#scram-sha-256-and-mechanism-negotiation +# https://github.com/mongodb/specifications/blob/master/source/auth/auth.md#scram-sha-256-and-mechanism-negotiation class TestSCRAM(AsyncIntegrationTest): @async_client_context.require_auth @async_client_context.require_version_min(3, 7, 2) diff --git a/test/asynchronous/test_encryption.py b/test/asynchronous/test_encryption.py index 40f1acd32d..e42c85aa7a 100644 --- a/test/asynchronous/test_encryption.py +++ b/test/asynchronous/test_encryption.py @@ -1610,7 +1610,7 @@ async def test_automatic(self): return await self._test_automatic(expected_document_extjson, {"secret_gcp": "string0"}) -# https://github.com/mongodb/specifications/blob/master/source/client-side-encryption/tests/README.rst#deadlock-tests +# https://github.com/mongodb/specifications/blob/master/source/client-side-encryption/tests/README.md#deadlock-tests class TestDeadlockProse(AsyncEncryptionIntegrationTest): async def asyncSetUp(self): self.client_test = await self.async_rs_or_single_client( @@ -1837,7 +1837,7 @@ async def test_case_8(self): self.assertEqual(len(self.topology_listener.results["opened"]), 1) -# https://github.com/mongodb/specifications/blob/master/source/client-side-encryption/tests/README.rst#14-decryption-events +# https://github.com/mongodb/specifications/blob/master/source/client-side-encryption/tests/README.md#14-decryption-events class TestDecryptProse(AsyncEncryptionIntegrationTest): async def asyncSetUp(self): self.client = async_client_context.client @@ -1909,7 +1909,7 @@ async def test_04_decrypt_success(self): self.assertEqual(event.reply["cursor"]["firstBatch"][0]["encrypted"], self.cipher_text) -# https://github.com/mongodb/specifications/blob/master/source/client-side-encryption/tests/README.rst#bypass-spawning-mongocryptd +# https://github.com/mongodb/specifications/blob/master/source/client-side-encryption/tests/README.md#bypass-spawning-mongocryptd class TestBypassSpawningMongocryptdProse(AsyncEncryptionIntegrationTest): @unittest.skipIf( os.environ.get("TEST_CRYPT_SHARED"), @@ -1990,7 +1990,7 @@ async def test_via_loading_shared_library(self): with self.assertRaises(ServerSelectionTimeoutError): await no_mongocryptd_client.db.command("ping") - # https://github.com/mongodb/specifications/blob/master/source/client-side-encryption/tests/README.rst#20-bypass-creating-mongocryptd-client-when-shared-library-is-loaded + # https://github.com/mongodb/specifications/blob/master/source/client-side-encryption/tests/README.md#20-bypass-creating-mongocryptd-client-when-shared-library-is-loaded @unittest.skipUnless(os.environ.get("TEST_CRYPT_SHARED"), "crypt_shared lib is not installed") async def test_client_via_loading_shared_library(self): connection_established = False @@ -2066,7 +2066,7 @@ async def test_invalid_hostname_in_kms_certificate(self): await self.client_encrypted.create_data_key("aws", master_key=key) -# https://github.com/mongodb/specifications/blob/master/source/client-side-encryption/tests/README.rst#kms-tls-options-tests +# https://github.com/mongodb/specifications/blob/master/source/client-side-encryption/tests/README.md#kms-tls-options-tests class 
TestKmsTLSOptions(AsyncEncryptionIntegrationTest): @unittest.skipUnless(any(AWS_CREDS.values()), "AWS environment credentials are not set") async def asyncSetUp(self): @@ -2272,7 +2272,7 @@ async def test_06_named_kms_providers_apply_tls_options_kmip(self): await self.client_encryption_with_names.create_data_key("kmip:with_tls") -# https://github.com/mongodb/specifications/blob/50e26fe/source/client-side-encryption/tests/README.rst#unique-index-on-keyaltnames +# https://github.com/mongodb/specifications/blob/50e26fe/source/client-side-encryption/tests/README.md#unique-index-on-keyaltnames class TestUniqueIndexOnKeyAltNamesProse(AsyncEncryptionIntegrationTest): async def asyncSetUp(self): self.client = async_client_context.client @@ -2303,7 +2303,7 @@ async def test_02_add_key_alt_name(self): assert key_doc["keyAltNames"] == ["def"] -# https://github.com/mongodb/specifications/blob/d4c9432/source/client-side-encryption/tests/README.rst#explicit-encryption +# https://github.com/mongodb/specifications/blob/d4c9432/source/client-side-encryption/tests/README.md#explicit-encryption class TestExplicitQueryableEncryption(AsyncEncryptionIntegrationTest): @async_client_context.require_no_standalone @async_client_context.require_version_min(7, 0, -1) @@ -2423,7 +2423,7 @@ async def test_05_roundtrip_encrypted_unindexed(self): self.assertEqual(decrypted, val) -# https://github.com/mongodb/specifications/blob/072601/source/client-side-encryption/tests/README.rst#rewrap +# https://github.com/mongodb/specifications/blob/072601/source/client-side-encryption/tests/README.md#rewrap class TestRewrapWithSeparateClientEncryption(AsyncEncryptionIntegrationTest): MASTER_KEYS: Mapping[str, Mapping[str, Any]] = { "aws": { @@ -2505,7 +2505,7 @@ async def run_test(self, src_provider, dst_provider): ) -# https://github.com/mongodb/specifications/blob/5cf3ed/source/client-side-encryption/tests/README.rst#on-demand-aws-credentials +# https://github.com/mongodb/specifications/blob/5cf3ed/source/client-side-encryption/tests/README.md#on-demand-aws-credentials class TestOnDemandAWSCredentials(AsyncEncryptionIntegrationTest): async def asyncSetUp(self): await super().asyncSetUp() @@ -2869,7 +2869,7 @@ async def test_accepts_trim_factor_0(self): assert len(payload) > len(self.payload_defaults) -# https://github.com/mongodb/specifications/blob/master/source/client-side-encryption/tests/README.rst#automatic-data-encryption-keys +# https://github.com/mongodb/specifications/blob/master/source/client-side-encryption/tests/README.md#automatic-data-encryption-keys class TestAutomaticDecryptionKeys(AsyncEncryptionIntegrationTest): @async_client_context.require_no_standalone @async_client_context.require_version_min(7, 0, -1) diff --git a/test/asynchronous/unified_format.py b/test/asynchronous/unified_format.py index 8f32ac4a2e..b382db474f 100644 --- a/test/asynchronous/unified_format.py +++ b/test/asynchronous/unified_format.py @@ -14,7 +14,7 @@ """Unified test format runner. -https://github.com/mongodb/specifications/blob/master/source/unified-test-format/unified-test-format.rst +https://github.com/mongodb/specifications/blob/master/source/unified-test-format/unified-test-format.md """ from __future__ import annotations @@ -431,7 +431,7 @@ class UnifiedSpecTestMixinV1(AsyncIntegrationTest): """Mixin class to run test cases from test specification files. Assumes that tests conform to the `unified test format - `_. + `_. Specification of the test suite being currently run is available as a class attribute ``TEST_SPEC``. 
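A rough sketch of how spec test modules already consume this runner (the directory name is illustrative; each generated class subclasses the mixin with ``TEST_SPEC`` set to the parsed file):

    import os

    from test.unified_format import generate_test_classes

    _TEST_PATH = os.path.join(
        os.path.dirname(os.path.realpath(__file__)), "crud", "unified"
    )

    # One unittest class is generated per JSON test file in the directory.
    globals().update(generate_test_classes(_TEST_PATH, module=__name__))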
diff --git a/test/test_auth.py b/test/test_auth.py index 310006afff..70c061b747 100644 --- a/test/test_auth.py +++ b/test/test_auth.py @@ -373,7 +373,7 @@ def test_scram_sha1(self): db.command("dbstats") -# https://github.com/mongodb/specifications/blob/master/source/auth/auth.rst#scram-sha-256-and-mechanism-negotiation +# https://github.com/mongodb/specifications/blob/master/source/auth/auth.md#scram-sha-256-and-mechanism-negotiation class TestSCRAM(IntegrationTest): @client_context.require_auth @client_context.require_version_min(3, 7, 2) diff --git a/test/test_dbref.py b/test/test_dbref.py index d170f43f56..ac2767a1ce 100644 --- a/test/test_dbref.py +++ b/test/test_dbref.py @@ -128,7 +128,7 @@ def test_dbref_hash(self): self.assertNotEqual(hash(dbref_1a), hash(dbref_2a)) -# https://github.com/mongodb/specifications/blob/master/source/dbref.rst#test-plan +# https://github.com/mongodb/specifications/blob/master/source/dbref/dbref.md#test-plan class TestDBRefSpec(unittest.TestCase): def test_decoding_1_2_3(self): doc: Any diff --git a/test/test_encryption.py b/test/test_encryption.py index 373981b1d2..0806f91a06 100644 --- a/test/test_encryption.py +++ b/test/test_encryption.py @@ -1604,7 +1604,7 @@ def test_automatic(self): return self._test_automatic(expected_document_extjson, {"secret_gcp": "string0"}) -# https://github.com/mongodb/specifications/blob/master/source/client-side-encryption/tests/README.rst#deadlock-tests +# https://github.com/mongodb/specifications/blob/master/source/client-side-encryption/tests/README.md#deadlock-tests class TestDeadlockProse(EncryptionIntegrationTest): def setUp(self): self.client_test = self.rs_or_single_client( @@ -1829,7 +1829,7 @@ def test_case_8(self): self.assertEqual(len(self.topology_listener.results["opened"]), 1) -# https://github.com/mongodb/specifications/blob/master/source/client-side-encryption/tests/README.rst#14-decryption-events +# https://github.com/mongodb/specifications/blob/master/source/client-side-encryption/tests/README.md#14-decryption-events class TestDecryptProse(EncryptionIntegrationTest): def setUp(self): self.client = client_context.client @@ -1901,7 +1901,7 @@ def test_04_decrypt_success(self): self.assertEqual(event.reply["cursor"]["firstBatch"][0]["encrypted"], self.cipher_text) -# https://github.com/mongodb/specifications/blob/master/source/client-side-encryption/tests/README.rst#bypass-spawning-mongocryptd +# https://github.com/mongodb/specifications/blob/master/source/client-side-encryption/tests/README.md#bypass-spawning-mongocryptd class TestBypassSpawningMongocryptdProse(EncryptionIntegrationTest): @unittest.skipIf( os.environ.get("TEST_CRYPT_SHARED"), @@ -1982,7 +1982,7 @@ def test_via_loading_shared_library(self): with self.assertRaises(ServerSelectionTimeoutError): no_mongocryptd_client.db.command("ping") - # https://github.com/mongodb/specifications/blob/master/source/client-side-encryption/tests/README.rst#20-bypass-creating-mongocryptd-client-when-shared-library-is-loaded + # https://github.com/mongodb/specifications/blob/master/source/client-side-encryption/tests/README.md#20-bypass-creating-mongocryptd-client-when-shared-library-is-loaded @unittest.skipUnless(os.environ.get("TEST_CRYPT_SHARED"), "crypt_shared lib is not installed") def test_client_via_loading_shared_library(self): connection_established = False @@ -2058,7 +2058,7 @@ def test_invalid_hostname_in_kms_certificate(self): self.client_encrypted.create_data_key("aws", master_key=key) -# 
https://github.com/mongodb/specifications/blob/master/source/client-side-encryption/tests/README.rst#kms-tls-options-tests +# https://github.com/mongodb/specifications/blob/master/source/client-side-encryption/tests/README.md#kms-tls-options-tests class TestKmsTLSOptions(EncryptionIntegrationTest): @unittest.skipUnless(any(AWS_CREDS.values()), "AWS environment credentials are not set") def setUp(self): @@ -2264,7 +2264,7 @@ def test_06_named_kms_providers_apply_tls_options_kmip(self): self.client_encryption_with_names.create_data_key("kmip:with_tls") -# https://github.com/mongodb/specifications/blob/50e26fe/source/client-side-encryption/tests/README.rst#unique-index-on-keyaltnames +# https://github.com/mongodb/specifications/blob/50e26fe/source/client-side-encryption/tests/README.md#unique-index-on-keyaltnames class TestUniqueIndexOnKeyAltNamesProse(EncryptionIntegrationTest): def setUp(self): self.client = client_context.client @@ -2293,7 +2293,7 @@ def test_02_add_key_alt_name(self): assert key_doc["keyAltNames"] == ["def"] -# https://github.com/mongodb/specifications/blob/d4c9432/source/client-side-encryption/tests/README.rst#explicit-encryption +# https://github.com/mongodb/specifications/blob/d4c9432/source/client-side-encryption/tests/README.md#explicit-encryption class TestExplicitQueryableEncryption(EncryptionIntegrationTest): @client_context.require_no_standalone @client_context.require_version_min(7, 0, -1) @@ -2407,7 +2407,7 @@ def test_05_roundtrip_encrypted_unindexed(self): self.assertEqual(decrypted, val) -# https://github.com/mongodb/specifications/blob/072601/source/client-side-encryption/tests/README.rst#rewrap +# https://github.com/mongodb/specifications/blob/072601/source/client-side-encryption/tests/README.md#rewrap class TestRewrapWithSeparateClientEncryption(EncryptionIntegrationTest): MASTER_KEYS: Mapping[str, Mapping[str, Any]] = { "aws": { @@ -2489,7 +2489,7 @@ def run_test(self, src_provider, dst_provider): ) -# https://github.com/mongodb/specifications/blob/5cf3ed/source/client-side-encryption/tests/README.rst#on-demand-aws-credentials +# https://github.com/mongodb/specifications/blob/5cf3ed/source/client-side-encryption/tests/README.md#on-demand-aws-credentials class TestOnDemandAWSCredentials(EncryptionIntegrationTest): def setUp(self): super().setUp() @@ -2851,7 +2851,7 @@ def test_accepts_trim_factor_0(self): assert len(payload) > len(self.payload_defaults) -# https://github.com/mongodb/specifications/blob/master/source/client-side-encryption/tests/README.rst#automatic-data-encryption-keys +# https://github.com/mongodb/specifications/blob/master/source/client-side-encryption/tests/README.md#automatic-data-encryption-keys class TestAutomaticDecryptionKeys(EncryptionIntegrationTest): @client_context.require_no_standalone @client_context.require_version_min(7, 0, -1) diff --git a/test/test_streaming_protocol.py b/test/test_streaming_protocol.py index b3b68703a4..d782aa1dd7 100644 --- a/test/test_streaming_protocol.py +++ b/test/test_streaming_protocol.py @@ -142,7 +142,7 @@ def changed_event(event): @client_context.require_failCommand_appName def test_monitor_waits_after_server_check_error(self): # This test implements: - # https://github.com/mongodb/specifications/blob/6c5b2ac/source/server-discovery-and-monitoring/server-discovery-and-monitoring-tests.rst#monitors-sleep-at-least-minheartbeatfreqencyms-between-checks + # 
https://github.com/mongodb/specifications/blob/master/source/server-discovery-and-monitoring/server-discovery-and-monitoring-tests.md#monitors-sleep-at-least-minheartbeatfreqencyms-between-checks fail_hello = { "mode": {"times": 5}, "data": { diff --git a/test/unified_format.py b/test/unified_format.py index be7fc1f8ad..0da6168303 100644 --- a/test/unified_format.py +++ b/test/unified_format.py @@ -14,7 +14,7 @@ """Unified test format runner. -https://github.com/mongodb/specifications/blob/master/source/unified-test-format/unified-test-format.rst +https://github.com/mongodb/specifications/blob/master/source/unified-test-format/unified-test-format.md """ from __future__ import annotations @@ -431,7 +431,7 @@ class UnifiedSpecTestMixinV1(IntegrationTest): """Mixin class to run test cases from test specification files. Assumes that tests conform to the `unified test format - `_. + `_. Specification of the test suite being currently run is available as a class attribute ``TEST_SPEC``. diff --git a/test/unified_format_shared.py b/test/unified_format_shared.py index f1b908a7a6..f315a77f48 100644 --- a/test/unified_format_shared.py +++ b/test/unified_format_shared.py @@ -14,7 +14,7 @@ """Shared utility functions and constants for the unified test format runner. -https://github.com/mongodb/specifications/blob/master/source/unified-test-format/unified-test-format.rst +https://github.com/mongodb/specifications/blob/master/source/unified-test-format/unified-test-format.md """ from __future__ import annotations From 32269aac1e9c34c2660329fc911a32e3cac78906 Mon Sep 17 00:00:00 2001 From: Shane Harvey Date: Thu, 31 Oct 2024 14:01:30 -0700 Subject: [PATCH 084/182] PYTHON-4885 Fix legacy extended JSON encoding of DatetimeMS (#1986) --- bson/json_util.py | 43 ++++++++++++++++++++++-------------------- doc/changelog.rst | 5 +++++ test/test_json_util.py | 25 +++++++++++++++++++++--- 3 files changed, 50 insertions(+), 23 deletions(-) diff --git a/bson/json_util.py b/bson/json_util.py index a171327ead..ecae103b55 100644 --- a/bson/json_util.py +++ b/bson/json_util.py @@ -617,25 +617,28 @@ def _parse_canonical_datetime( raise TypeError(f"Bad $date, extra field(s): {doc}") # mongoexport 2.6 and newer if isinstance(dtm, str): - # Parse offset - if dtm[-1] == "Z": - dt = dtm[:-1] - offset = "Z" - elif dtm[-6] in ("+", "-") and dtm[-3] == ":": - # (+|-)HH:MM - dt = dtm[:-6] - offset = dtm[-6:] - elif dtm[-5] in ("+", "-"): - # (+|-)HHMM - dt = dtm[:-5] - offset = dtm[-5:] - elif dtm[-3] in ("+", "-"): - # (+|-)HH - dt = dtm[:-3] - offset = dtm[-3:] - else: - dt = dtm - offset = "" + try: + # Parse offset + if dtm[-1] == "Z": + dt = dtm[:-1] + offset = "Z" + elif dtm[-6] in ("+", "-") and dtm[-3] == ":": + # (+|-)HH:MM + dt = dtm[:-6] + offset = dtm[-6:] + elif dtm[-5] in ("+", "-"): + # (+|-)HHMM + dt = dtm[:-5] + offset = dtm[-5:] + elif dtm[-3] in ("+", "-"): + # (+|-)HH + dt = dtm[:-3] + offset = dtm[-3:] + else: + dt = dtm + offset = "" + except IndexError as exc: + raise ValueError(f"time data {dtm!r} does not match ISO-8601 datetime format") from exc # Parse the optional factional seconds portion. 
dot_index = dt.rfind(".") @@ -848,7 +851,7 @@ def _encode_datetimems(obj: Any, json_options: JSONOptions) -> dict: ): return _encode_datetime(obj.as_datetime(), json_options) elif json_options.datetime_representation == DatetimeRepresentation.LEGACY: - return {"$date": str(int(obj))} + return {"$date": int(obj)} return {"$date": {"$numberLong": str(int(obj))}} diff --git a/doc/changelog.rst b/doc/changelog.rst index bd4eafe3ef..d9e6cc3f5b 100644 --- a/doc/changelog.rst +++ b/doc/changelog.rst @@ -28,6 +28,11 @@ PyMongo 4.11 brings a number of changes including: :meth:`~pymongo.asynchronous.mongo_client.AsyncMongoClient.bulk_write` now throw an error when ``ordered=True`` or ``verboseResults=True`` are used with unacknowledged writes. These are unavoidable breaking changes. +- Fixed a bug in :const:`bson.json_util.dumps` where a :class:`bson.datetime_ms.DatetimeMS` would + be incorrectly encoded as ``'{"$date": "X"}'`` instead of ``'{"$date": X}'`` when using the + legacy MongoDB Extended JSON datetime representation. +- Fixed a bug where :const:`bson.json_util.loads` would raise an IndexError when parsing an invalid + ``"$date"`` instead of a ValueError. Issues Resolved ............... diff --git a/test/test_json_util.py b/test/test_json_util.py index 3a40c174e8..821ca76da0 100644 --- a/test/test_json_util.py +++ b/test/test_json_util.py @@ -137,7 +137,7 @@ def test_datetime(self): '{"dt": { "$date" : "1970-01-01T00:00:00.000Z"}}', '{"dt": { "$date" : "1970-01-01T00:00:00.000000Z"}}', '{"dt": { "$date" : "1970-01-01T00:00:00Z"}}', - '{"dt": {"$date": "1970-01-01T00:00:00.000"}}', + '{"dt": { "$date" : "1970-01-01T00:00:00.000"}}', '{"dt": { "$date" : "1970-01-01T00:00:00"}}', '{"dt": { "$date" : "1970-01-01T00:00:00.000000"}}', '{"dt": { "$date" : "1969-12-31T16:00:00.000-0800"}}', @@ -282,9 +282,9 @@ def test_datetime_ms(self): opts = JSONOptions( datetime_representation=DatetimeRepresentation.LEGACY, json_mode=JSONMode.LEGACY ) - self.assertEqual('{"x": {"$date": "-1"}}', json_util.dumps(dat_min, json_options=opts)) + self.assertEqual('{"x": {"$date": -1}}', json_util.dumps(dat_min, json_options=opts)) self.assertEqual( - '{"x": {"$date": "' + str(int(dat_max["x"])) + '"}}', + '{"x": {"$date": ' + str(int(dat_max["x"])) + "}}", json_util.dumps(dat_max, json_options=opts), ) @@ -317,6 +317,25 @@ def test_datetime_ms(self): json_util.loads(json_util.dumps(dat_max), json_options=opts)["x"], ) + def test_parse_invalid_date(self): + # These cases should raise ValueError, not IndexError. + for invalid in [ + '{"dt": { "$date" : "1970-01-01T00:00:"}}', + '{"dt": { "$date" : "1970-01-01T01:00"}}', + '{"dt": { "$date" : "1970-01-01T01:"}}', + '{"dt": { "$date" : "1970-01-01T01"}}', + '{"dt": { "$date" : "1970-01-01T"}}', + '{"dt": { "$date" : "1970-01-01"}}', + '{"dt": { "$date" : "1970-01-"}}', + '{"dt": { "$date" : "1970-01"}}', + '{"dt": { "$date" : "1970-"}}', + '{"dt": { "$date" : "1970"}}', + '{"dt": { "$date" : "1"}}', + '{"dt": { "$date" : ""}}', + ]: + with self.assertRaisesRegex(ValueError, "does not match"): + json_util.loads(invalid) + def test_regex_object_hook(self): # Extended JSON format regular expression. 
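A small sketch mirroring the tests above (assumes a build with this fix applied): the legacy representation now emits a bare integer ``$date``, and a malformed ``$date`` string fails with ``ValueError``:

    from bson.datetime_ms import DatetimeMS
    from bson.json_util import (
        DatetimeRepresentation,
        JSONMode,
        JSONOptions,
        dumps,
        loads,
    )

    legacy = JSONOptions(
        datetime_representation=DatetimeRepresentation.LEGACY, json_mode=JSONMode.LEGACY
    )
    print(dumps({"x": DatetimeMS(-1)}, json_options=legacy))  # {"x": {"$date": -1}}

    try:
        loads('{"dt": {"$date": ""}}')
    except ValueError as exc:
        print(exc)  # e.g. "time data '' does not match ISO-8601 datetime format"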
pat = "a*b" From 9f53f299679af7aab03149dc9f91b0b4dce290bb Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Fri, 1 Nov 2024 08:24:52 -0500 Subject: [PATCH 085/182] PYTHON-4906 Add branch creation workflow to Python Driver (#1971) --- .github/workflows/create-release-branch.yml | 55 +++++++++++++++++++++ 1 file changed, 55 insertions(+) create mode 100644 .github/workflows/create-release-branch.yml diff --git a/.github/workflows/create-release-branch.yml b/.github/workflows/create-release-branch.yml new file mode 100644 index 0000000000..f24f94179a --- /dev/null +++ b/.github/workflows/create-release-branch.yml @@ -0,0 +1,55 @@ +name: Create Release Branch + +on: + workflow_dispatch: + inputs: + branch_name: + description: The name of the new branch + required: true + version: + description: The version to set on the branch + required: true + base_ref: + description: The base reference for the branch + push_changes: + description: Whether to push the changes + default: "true" + +concurrency: + group: create-branch-${{ github.ref }} + cancel-in-progress: true + +defaults: + run: + shell: bash -eux {0} + +jobs: + create-branch: + environment: release + runs-on: ubuntu-latest + permissions: + id-token: write + contents: write + outputs: + version: ${{ steps.pre-publish.outputs.version }} + steps: + - uses: mongodb-labs/drivers-github-tools/secure-checkout@v2 + with: + app_id: ${{ vars.APP_ID }} + private_key: ${{ secrets.APP_PRIVATE_KEY }} + - uses: mongodb-labs/drivers-github-tools/setup@v2 + with: + aws_role_arn: ${{ secrets.AWS_ROLE_ARN }} + aws_region_name: ${{ vars.AWS_REGION_NAME }} + aws_secret_id: ${{ secrets.AWS_SECRET_ID }} + artifactory_username: ${{ vars.ARTIFACTORY_USERNAME }} + - uses: mongodb-labs/drivers-github-tools/create-branch@v2 + id: create-branch + with: + branch_name: ${{ inputs.branch_name }} + version: ${{ inputs.version }} + base_ref: ${{ inputs.base_ref }} + push_changes: ${{ inputs.push_changes }} + version_bump_script: hatch version + evergreen_project: mongo-python-driver-release + release_workflow_path: ./.github/workflows/release-python.yml From 260322277d519ade76fe85157e8bc2f18c49dfcf Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Fri, 1 Nov 2024 13:20:37 -0400 Subject: [PATCH 086/182] PYTHON-4926 - Skip tests with errorCodeName on Serverless (#1989) --- .../unified/commit-retry.json | 5 +++++ .../unified/commit-writeconcernerror.json | 17 ++++++++++++++++- test/transactions/unified/retryable-commit.json | 5 +++++ 3 files changed, 26 insertions(+), 1 deletion(-) diff --git a/test/transactions-convenient-api/unified/commit-retry.json b/test/transactions-convenient-api/unified/commit-retry.json index 928f0167e4..cc80201167 100644 --- a/test/transactions-convenient-api/unified/commit-retry.json +++ b/test/transactions-convenient-api/unified/commit-retry.json @@ -422,6 +422,11 @@ }, { "description": "commit is not retried after MaxTimeMSExpired error", + "runOnRequirements": [ + { + "serverless": "forbid" + } + ], "operations": [ { "name": "failPoint", diff --git a/test/transactions-convenient-api/unified/commit-writeconcernerror.json b/test/transactions-convenient-api/unified/commit-writeconcernerror.json index a6f6e6bd7f..a455a450bf 100644 --- a/test/transactions-convenient-api/unified/commit-writeconcernerror.json +++ b/test/transactions-convenient-api/unified/commit-writeconcernerror.json @@ -1,6 +1,6 @@ { "description": "commit-writeconcernerror", - "schemaVersion": "1.3", + "schemaVersion": "1.4", "runOnRequirements": [ { "minServerVersion": "4.0", @@ 
-414,6 +414,11 @@ }, { "description": "commitTransaction is not retried after UnknownReplWriteConcern error", + "runOnRequirements": [ + { + "serverless": "forbid" + } + ], "operations": [ { "name": "failPoint", @@ -546,6 +551,11 @@ }, { "description": "commitTransaction is not retried after UnsatisfiableWriteConcern error", + "runOnRequirements": [ + { + "serverless": "forbid" + } + ], "operations": [ { "name": "failPoint", @@ -678,6 +688,11 @@ }, { "description": "commitTransaction is not retried after MaxTimeMSExpired error", + "runOnRequirements": [ + { + "serverless": "forbid" + } + ], "operations": [ { "name": "failPoint", diff --git a/test/transactions/unified/retryable-commit.json b/test/transactions/unified/retryable-commit.json index b794c1c55c..7d7e52495d 100644 --- a/test/transactions/unified/retryable-commit.json +++ b/test/transactions/unified/retryable-commit.json @@ -89,6 +89,11 @@ "tests": [ { "description": "commitTransaction fails after Interrupted", + "runOnRequirements": [ + { + "serverless": "forbid" + } + ], "operations": [ { "object": "testRunner", From f3343aa952720c1151e73ce38d53d0288095ebd1 Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Fri, 1 Nov 2024 13:20:59 -0400 Subject: [PATCH 087/182] =?UTF-8?q?PYTHON-4916=20-=20URI=20options=20spec?= =?UTF-8?q?=20tests=20specify=20empty=20options=20when=20the=20i=E2=80=A6?= =?UTF-8?q?=20(#1991)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- test/uri_options/auth-options.json | 5 +- test/uri_options/compression-options.json | 6 +- test/uri_options/concern-options.json | 6 +- test/uri_options/connection-options.json | 38 +++--- test/uri_options/connection-pool-options.json | 8 +- test/uri_options/sdam-options.json | 2 +- test/uri_options/single-threaded-options.json | 2 +- test/uri_options/srv-options.json | 12 +- test/uri_options/tls-options.json | 110 +++++++++--------- 9 files changed, 95 insertions(+), 94 deletions(-) diff --git a/test/uri_options/auth-options.json b/test/uri_options/auth-options.json index fadbac35d2..d7fa14a134 100644 --- a/test/uri_options/auth-options.json +++ b/test/uri_options/auth-options.json @@ -2,7 +2,7 @@ "tests": [ { "description": "Valid auth options are parsed correctly (GSSAPI)", - "uri": "mongodb://foo:bar@example.com/?authMechanism=GSSAPI&authMechanismProperties=SERVICE_NAME:other,CANONICALIZE_HOST_NAME:true&authSource=$external", + "uri": "mongodb://foo:bar@example.com/?authMechanism=GSSAPI&authMechanismProperties=SERVICE_NAME:other,CANONICALIZE_HOST_NAME:forward,SERVICE_HOST:example.com&authSource=$external", "valid": true, "warning": false, "hosts": null, @@ -11,7 +11,8 @@ "authMechanism": "GSSAPI", "authMechanismProperties": { "SERVICE_NAME": "other", - "CANONICALIZE_HOST_NAME": true + "SERVICE_HOST": "example.com", + "CANONICALIZE_HOST_NAME": "forward" }, "authSource": "$external" } diff --git a/test/uri_options/compression-options.json b/test/uri_options/compression-options.json index 16bd27b2cc..3c13dee062 100644 --- a/test/uri_options/compression-options.json +++ b/test/uri_options/compression-options.json @@ -35,7 +35,7 @@ "warning": true, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "Too low zlibCompressionLevel causes a warning", @@ -44,7 +44,7 @@ "warning": true, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "Too high zlibCompressionLevel causes a warning", @@ -53,7 +53,7 @@ "warning": true, "hosts": null, "auth": null, - "options": {} + "options": 
null } ] } diff --git a/test/uri_options/concern-options.json b/test/uri_options/concern-options.json index 5a8ef6c272..f55f298087 100644 --- a/test/uri_options/concern-options.json +++ b/test/uri_options/concern-options.json @@ -43,7 +43,7 @@ "warning": true, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "Too low wTimeoutMS causes a warning", @@ -52,7 +52,7 @@ "warning": true, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "Invalid journal causes a warning", @@ -61,7 +61,7 @@ "warning": true, "hosts": null, "auth": null, - "options": {} + "options": null } ] } diff --git a/test/uri_options/connection-options.json b/test/uri_options/connection-options.json index b2669b6cf1..bbaa295ecb 100644 --- a/test/uri_options/connection-options.json +++ b/test/uri_options/connection-options.json @@ -27,7 +27,7 @@ "warning": true, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "Too low connectTimeoutMS causes a warning", @@ -36,7 +36,7 @@ "warning": true, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "Non-numeric heartbeatFrequencyMS causes a warning", @@ -45,7 +45,7 @@ "warning": true, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "Too low heartbeatFrequencyMS causes a warning", @@ -54,7 +54,7 @@ "warning": true, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "Non-numeric localThresholdMS causes a warning", @@ -63,7 +63,7 @@ "warning": true, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "Too low localThresholdMS causes a warning", @@ -72,7 +72,7 @@ "warning": true, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "Invalid retryWrites causes a warning", @@ -81,7 +81,7 @@ "warning": true, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "Non-numeric serverSelectionTimeoutMS causes a warning", @@ -90,7 +90,7 @@ "warning": true, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "Too low serverSelectionTimeoutMS causes a warning", @@ -99,7 +99,7 @@ "warning": true, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "Non-numeric socketTimeoutMS causes a warning", @@ -108,7 +108,7 @@ "warning": true, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "Too low socketTimeoutMS causes a warning", @@ -117,7 +117,7 @@ "warning": true, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "directConnection=true", @@ -137,7 +137,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "directConnection=false", @@ -168,7 +168,7 @@ "warning": true, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "loadBalanced=true", @@ -211,7 +211,7 @@ "warning": true, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "loadBalanced=true with multiple hosts causes an error", @@ -220,7 +220,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "loadBalanced=true with directConnection=true causes an error", @@ -229,7 +229,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "loadBalanced=true with replicaSet causes an error", @@ -238,7 +238,7 @@ "warning": false, "hosts": null, 
"auth": null, - "options": {} + "options": null }, { "description": "timeoutMS=0", @@ -258,7 +258,7 @@ "warning": true, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "Too low timeoutMS causes a warning", @@ -267,7 +267,7 @@ "warning": true, "hosts": null, "auth": null, - "options": {} + "options": null } ] } diff --git a/test/uri_options/connection-pool-options.json b/test/uri_options/connection-pool-options.json index 118b2f6783..a582867d07 100644 --- a/test/uri_options/connection-pool-options.json +++ b/test/uri_options/connection-pool-options.json @@ -21,7 +21,7 @@ "warning": true, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "Too low maxIdleTimeMS causes a warning", @@ -30,7 +30,7 @@ "warning": true, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "maxPoolSize=0 does not error", @@ -61,7 +61,7 @@ "warning": true, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "maxConnecting<0 causes a warning", @@ -70,7 +70,7 @@ "warning": true, "hosts": null, "auth": null, - "options": {} + "options": null } ] } diff --git a/test/uri_options/sdam-options.json b/test/uri_options/sdam-options.json index 673f5607ee..ae0aeb2821 100644 --- a/test/uri_options/sdam-options.json +++ b/test/uri_options/sdam-options.json @@ -40,7 +40,7 @@ "warning": true, "hosts": null, "auth": null, - "options": {} + "options": null } ] } diff --git a/test/uri_options/single-threaded-options.json b/test/uri_options/single-threaded-options.json index fcd24fb880..80ac3fa4ee 100644 --- a/test/uri_options/single-threaded-options.json +++ b/test/uri_options/single-threaded-options.json @@ -18,7 +18,7 @@ "warning": true, "hosts": null, "auth": null, - "options": {} + "options": null } ] } diff --git a/test/uri_options/srv-options.json b/test/uri_options/srv-options.json index ffc356f12f..0670612c0d 100644 --- a/test/uri_options/srv-options.json +++ b/test/uri_options/srv-options.json @@ -18,7 +18,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "SRV URI with srvMaxHosts", @@ -38,7 +38,7 @@ "warning": true, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "SRV URI with invalid type for srvMaxHosts", @@ -47,7 +47,7 @@ "warning": true, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "Non-SRV URI with srvMaxHosts", @@ -56,7 +56,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "SRV URI with positive srvMaxHosts and replicaSet", @@ -65,7 +65,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "SRV URI with positive srvMaxHosts and loadBalanced=true", @@ -74,7 +74,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "SRV URI with positive srvMaxHosts and loadBalanced=false", diff --git a/test/uri_options/tls-options.json b/test/uri_options/tls-options.json index 8beaaddd86..526cde1cbe 100644 --- a/test/uri_options/tls-options.json +++ b/test/uri_options/tls-options.json @@ -31,7 +31,7 @@ "warning": true, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsAllowInvalidCertificates is parsed correctly", @@ -62,7 +62,7 @@ "warning": true, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsInsecure is parsed correctly", @@ -82,7 +82,7 @@ 
"warning": true, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsInsecure and tlsAllowInvalidCertificates both present (and true) raises an error", @@ -91,7 +91,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsInsecure and tlsAllowInvalidCertificates both present (and false) raises an error", @@ -100,7 +100,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsAllowInvalidCertificates and tlsInsecure both present (and true) raises an error", @@ -109,7 +109,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsAllowInvalidCertificates and tlsInsecure both present (and false) raises an error", @@ -118,7 +118,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsInsecure and tlsAllowInvalidHostnames both present (and true) raises an error", @@ -127,7 +127,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsInsecure and tlsAllowInvalidHostnames both present (and false) raises an error", @@ -136,7 +136,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsAllowInvalidHostnames and tlsInsecure both present (and true) raises an error", @@ -145,7 +145,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsAllowInvalidHostnames and tlsInsecure both present (and false) raises an error", @@ -154,7 +154,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tls=true and ssl=true doesn't warn", @@ -199,7 +199,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tls=true and ssl=false raises error", @@ -208,7 +208,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "ssl=false and tls=true raises error", @@ -217,7 +217,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "ssl=true and tls=false raises error", @@ -226,7 +226,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsDisableCertificateRevocationCheck can be set to true", @@ -259,7 +259,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsAllowInvalidCertificates=true and tlsDisableCertificateRevocationCheck=false raises an error", @@ -268,7 +268,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsAllowInvalidCertificates=false and tlsDisableCertificateRevocationCheck=true raises an error", @@ -277,7 +277,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsAllowInvalidCertificates and tlsDisableCertificateRevocationCheck both present (and false) raises an error", @@ -286,7 +286,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsDisableCertificateRevocationCheck and tlsAllowInvalidCertificates both present (and true) raises an error", @@ -295,7 +295,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsDisableCertificateRevocationCheck=true and 
tlsAllowInvalidCertificates=false raises an error", @@ -304,7 +304,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsDisableCertificateRevocationCheck=false and tlsAllowInvalidCertificates=true raises an error", @@ -313,7 +313,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsDisableCertificateRevocationCheck and tlsAllowInvalidCertificates both present (and false) raises an error", @@ -322,7 +322,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsInsecure and tlsDisableCertificateRevocationCheck both present (and true) raises an error", @@ -331,7 +331,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsInsecure=true and tlsDisableCertificateRevocationCheck=false raises an error", @@ -340,7 +340,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsInsecure=false and tlsDisableCertificateRevocationCheck=true raises an error", @@ -349,7 +349,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsInsecure and tlsDisableCertificateRevocationCheck both present (and false) raises an error", @@ -358,7 +358,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsDisableCertificateRevocationCheck and tlsInsecure both present (and true) raises an error", @@ -367,7 +367,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsDisableCertificateRevocationCheck=true and tlsInsecure=false raises an error", @@ -376,7 +376,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsDisableCertificateRevocationCheck=false and tlsInsecure=true raises an error", @@ -385,7 +385,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsDisableCertificateRevocationCheck and tlsInsecure both present (and false) raises an error", @@ -394,7 +394,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsDisableCertificateRevocationCheck and tlsDisableOCSPEndpointCheck both present (and true) raises an error", @@ -403,7 +403,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsDisableCertificateRevocationCheck=true and tlsDisableOCSPEndpointCheck=false raises an error", @@ -412,7 +412,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsDisableCertificateRevocationCheck=false and tlsDisableOCSPEndpointCheck=true raises an error", @@ -421,7 +421,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsDisableCertificateRevocationCheck and tlsDisableOCSPEndpointCheck both present (and false) raises an error", @@ -430,7 +430,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsDisableOCSPEndpointCheck and tlsDisableCertificateRevocationCheck both present (and true) raises an error", @@ -439,7 +439,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsDisableOCSPEndpointCheck=true and tlsDisableCertificateRevocationCheck=false raises an error", @@ 
-448,7 +448,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsDisableOCSPEndpointCheck=false and tlsDisableCertificateRevocationCheck=true raises an error", @@ -457,7 +457,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsDisableOCSPEndpointCheck and tlsDisableCertificateRevocationCheck both present (and false) raises an error", @@ -466,7 +466,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsDisableOCSPEndpointCheck can be set to true", @@ -499,7 +499,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsInsecure=true and tlsDisableOCSPEndpointCheck=false raises an error", @@ -508,7 +508,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsInsecure=false and tlsDisableOCSPEndpointCheck=true raises an error", @@ -517,7 +517,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsInsecure and tlsDisableOCSPEndpointCheck both present (and false) raises an error", @@ -526,7 +526,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsDisableOCSPEndpointCheck and tlsInsecure both present (and true) raises an error", @@ -535,7 +535,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsDisableOCSPEndpointCheck=true and tlsInsecure=false raises an error", @@ -544,7 +544,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsDisableOCSPEndpointCheck=false and tlsInsecure=true raises an error", @@ -553,7 +553,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsDisableOCSPEndpointCheck and tlsInsecure both present (and false) raises an error", @@ -562,7 +562,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsAllowInvalidCertificates and tlsDisableOCSPEndpointCheck both present (and true) raises an error", @@ -571,7 +571,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsAllowInvalidCertificates=true and tlsDisableOCSPEndpointCheck=false raises an error", @@ -580,7 +580,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsAllowInvalidCertificates=false and tlsDisableOCSPEndpointCheck=true raises an error", @@ -589,7 +589,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsAllowInvalidCertificates and tlsDisableOCSPEndpointCheck both present (and false) raises an error", @@ -598,7 +598,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsDisableOCSPEndpointCheck and tlsAllowInvalidCertificates both present (and true) raises an error", @@ -607,7 +607,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsDisableOCSPEndpointCheck=true and tlsAllowInvalidCertificates=false raises an error", @@ -616,7 +616,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsDisableOCSPEndpointCheck=false and tlsAllowInvalidCertificates=true raises an error", @@ -625,7 
+625,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "tlsDisableOCSPEndpointCheck and tlsAllowInvalidCertificates both present (and false) raises an error", @@ -634,7 +634,7 @@ "warning": false, "hosts": null, "auth": null, - "options": {} + "options": null } ] } From c680f6342750593def5cb67d2d4c617ca23e3bbd Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Fri, 1 Nov 2024 13:21:07 -0400 Subject: [PATCH 088/182] PYTHON-4917 - Test that inserts and upserts respect null _id values (#1992) --- test/crud/unified/create-null-ids.json | 253 +++++++++++++++++++++++++ 1 file changed, 253 insertions(+) create mode 100644 test/crud/unified/create-null-ids.json diff --git a/test/crud/unified/create-null-ids.json b/test/crud/unified/create-null-ids.json new file mode 100644 index 0000000000..8e0c3ac5d1 --- /dev/null +++ b/test/crud/unified/create-null-ids.json @@ -0,0 +1,253 @@ +{ + "description": "create-null-ids", + "schemaVersion": "1.0", + "createEntities": [ + { + "client": { + "id": "client", + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "database", + "client": "client", + "databaseName": "crud_id" + } + }, + { + "collection": { + "id": "collection", + "database": "database", + "collectionName": "type_tests" + } + } + ], + "initialData": [ + { + "collectionName": "type_tests", + "databaseName": "crud_id", + "documents": [] + } + ], + "tests": [ + { + "description": "inserting _id with type null via insertOne", + "operations": [ + { + "name": "insertOne", + "object": "collection", + "arguments": { + "document": { + "_id": null + } + } + }, + { + "name": "countDocuments", + "object": "collection", + "arguments": { + "filter": { + "_id": { + "$type": "null" + } + } + }, + "expectResult": 1 + } + ] + }, + { + "description": "inserting _id with type null via insertMany", + "operations": [ + { + "name": "insertMany", + "object": "collection", + "arguments": { + "documents": [ + { + "_id": null + } + ] + } + }, + { + "name": "countDocuments", + "object": "collection", + "arguments": { + "filter": { + "_id": { + "$type": "null" + } + } + }, + "expectResult": 1 + } + ] + }, + { + "description": "inserting _id with type null via updateOne", + "operations": [ + { + "name": "updateOne", + "object": "collection", + "arguments": { + "filter": { + "_id": null + }, + "update": { + "$unset": { + "a": "" + } + }, + "upsert": true + } + }, + { + "name": "countDocuments", + "object": "collection", + "arguments": { + "filter": { + "_id": { + "$type": "null" + } + } + }, + "expectResult": 1 + } + ] + }, + { + "description": "inserting _id with type null via updateMany", + "operations": [ + { + "name": "updateMany", + "object": "collection", + "arguments": { + "filter": { + "_id": null + }, + "update": { + "$unset": { + "a": "" + } + }, + "upsert": true + } + }, + { + "name": "countDocuments", + "object": "collection", + "arguments": { + "filter": { + "_id": { + "$type": "null" + } + } + }, + "expectResult": 1 + } + ] + }, + { + "description": "inserting _id with type null via replaceOne", + "operations": [ + { + "name": "replaceOne", + "object": "collection", + "arguments": { + "filter": {}, + "replacement": { + "_id": null + }, + "upsert": true + } + }, + { + "name": "countDocuments", + "object": "collection", + "arguments": { + "filter": { + "_id": { + "$type": "null" + } + } + }, + "expectResult": 1 + } + ] + }, + { + "description": "inserting _id with type null via bulkWrite", + "operations": [ + { + "name": 
"bulkWrite", + "object": "collection", + "arguments": { + "requests": [ + { + "insertOne": { + "document": { + "_id": null + } + } + } + ] + } + }, + { + "name": "countDocuments", + "object": "collection", + "arguments": { + "filter": { + "_id": { + "$type": "null" + } + } + }, + "expectResult": 1 + } + ] + }, + { + "description": "inserting _id with type null via clientBulkWrite", + "runOnRequirements": [ + { + "minServerVersion": "8.0" + } + ], + "operations": [ + { + "name": "clientBulkWrite", + "object": "client", + "arguments": { + "models": [ + { + "insertOne": { + "namespace": "crud_id.type_tests", + "document": { + "_id": null + } + } + } + ] + } + }, + { + "name": "countDocuments", + "object": "collection", + "arguments": { + "filter": { + "_id": { + "$type": "null" + } + } + }, + "expectResult": 1 + } + ] + } + ] +} From 6862e94d176c6ee7959d3d114aff301774ecf7df Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Fri, 1 Nov 2024 13:21:17 -0400 Subject: [PATCH 089/182] PYTHON-4923 - Add mixed case tests for read preference tags (#1990) --- test/uri_options/read-preference-options.json | 21 ++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/test/uri_options/read-preference-options.json b/test/uri_options/read-preference-options.json index cdac6a63c3..abbf0d0cc6 100644 --- a/test/uri_options/read-preference-options.json +++ b/test/uri_options/read-preference-options.json @@ -36,6 +36,21 @@ ] } }, + { + "description": "Read preference tags are case sensitive", + "uri": "mongodb://example.com/?readPreference=secondary&readPreferenceTags=dc:NY", + "valid": true, + "warning": false, + "hosts": null, + "auth": null, + "options": { + "readPreferenceTags": [ + { + "dc": "NY" + } + ] + } + }, { "description": "Invalid readPreferenceTags causes a warning", "uri": "mongodb://example.com/?readPreferenceTags=invalid", @@ -43,7 +58,7 @@ "warning": true, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "Non-numeric maxStalenessSeconds causes a warning", @@ -52,7 +67,7 @@ "warning": true, "hosts": null, "auth": null, - "options": {} + "options": null }, { "description": "Too low maxStalenessSeconds causes a warning", @@ -61,7 +76,7 @@ "warning": true, "hosts": null, "auth": null, - "options": {} + "options": null } ] } From a9caaf0d6a7b40234dfb13720684c914dfd3e633 Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Mon, 4 Nov 2024 09:24:29 -0500 Subject: [PATCH 090/182] =?UTF-8?q?PYTHON-4941=20-=20Fix=20Synchronous=20u?= =?UTF-8?q?nified=20test=20runner=20being=20used=20in=20async=E2=80=A6=20(?= =?UTF-8?q?#1993)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- test/asynchronous/test_auth_spec.py | 2 +- test/asynchronous/test_change_stream.py | 2 +- test/asynchronous/test_connection_logging.py | 2 +- test/asynchronous/test_crud_unified.py | 2 +- test/asynchronous/test_encryption.py | 2 +- test/asynchronous/unified_format.py | 2 +- test/unified_format.py | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/test/asynchronous/test_auth_spec.py b/test/asynchronous/test_auth_spec.py index a6ab1cb331..e9e43d5759 100644 --- a/test/asynchronous/test_auth_spec.py +++ b/test/asynchronous/test_auth_spec.py @@ -25,7 +25,7 @@ sys.path[0:0] = [""] from test import unittest -from test.unified_format import generate_test_classes +from test.asynchronous.unified_format import generate_test_classes from pymongo import AsyncMongoClient from pymongo.asynchronous.auth_oidc import OIDCCallback diff --git 
a/test/asynchronous/test_change_stream.py b/test/asynchronous/test_change_stream.py index 98641f46ee..8e16fe7528 100644 --- a/test/asynchronous/test_change_stream.py +++ b/test/asynchronous/test_change_stream.py @@ -35,7 +35,7 @@ async_client_context, unittest, ) -from test.unified_format import generate_test_classes +from test.asynchronous.unified_format import generate_test_classes from test.utils import ( AllowListEventListener, EventListener, diff --git a/test/asynchronous/test_connection_logging.py b/test/asynchronous/test_connection_logging.py index 6bc9835b70..945c6c59b5 100644 --- a/test/asynchronous/test_connection_logging.py +++ b/test/asynchronous/test_connection_logging.py @@ -22,7 +22,7 @@ sys.path[0:0] = [""] from test import unittest -from test.unified_format import generate_test_classes +from test.asynchronous.unified_format import generate_test_classes _IS_SYNC = False diff --git a/test/asynchronous/test_crud_unified.py b/test/asynchronous/test_crud_unified.py index 3d8deb36e9..e6f42d5bdf 100644 --- a/test/asynchronous/test_crud_unified.py +++ b/test/asynchronous/test_crud_unified.py @@ -22,7 +22,7 @@ sys.path[0:0] = [""] from test import unittest -from test.unified_format import generate_test_classes +from test.asynchronous.unified_format import generate_test_classes _IS_SYNC = False diff --git a/test/asynchronous/test_encryption.py b/test/asynchronous/test_encryption.py index e42c85aa7a..767b3ecf0a 100644 --- a/test/asynchronous/test_encryption.py +++ b/test/asynchronous/test_encryption.py @@ -46,6 +46,7 @@ unittest, ) from test.asynchronous.test_bulk import AsyncBulkTestBase +from test.asynchronous.unified_format import generate_test_classes from test.asynchronous.utils_spec_runner import AsyncSpecRunner from test.helpers import ( AWS_CREDS, @@ -56,7 +57,6 @@ KMIP_CREDS, LOCAL_MASTER_KEY, ) -from test.unified_format import generate_test_classes from test.utils import ( AllowListEventListener, OvertCommandListener, diff --git a/test/asynchronous/unified_format.py b/test/asynchronous/unified_format.py index b382db474f..f8179dc0c7 100644 --- a/test/asynchronous/unified_format.py +++ b/test/asynchronous/unified_format.py @@ -862,7 +862,7 @@ async def _clientEncryptionOperation_createDataKey(self, target, *args, **kwargs return await target.create_data_key(*args, **kwargs) async def _clientEncryptionOperation_getKeys(self, target, *args, **kwargs): - return await (await target.get_keys(*args, **kwargs)).to_list() + return await target.get_keys(*args, **kwargs).to_list() async def _clientEncryptionOperation_deleteKey(self, target, *args, **kwargs): result = await target.delete_key(*args, **kwargs) diff --git a/test/unified_format.py b/test/unified_format.py index 0da6168303..80c37470e3 100644 --- a/test/unified_format.py +++ b/test/unified_format.py @@ -858,7 +858,7 @@ def _clientEncryptionOperation_createDataKey(self, target, *args, **kwargs): return target.create_data_key(*args, **kwargs) def _clientEncryptionOperation_getKeys(self, target, *args, **kwargs): - return (target.get_keys(*args, **kwargs)).to_list() + return target.get_keys(*args, **kwargs).to_list() def _clientEncryptionOperation_deleteKey(self, target, *args, **kwargs): result = target.delete_key(*args, **kwargs) From 57fd616ace819ac4d8535b7009c2b079b6097d57 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Mon, 4 Nov 2024 10:26:07 -0600 Subject: [PATCH 091/182] PYTHON-4330 Add Kubernetes Support for OIDC (#1759) --- .evergreen/config.yml | 107 +++++++++++++-------- 
.evergreen/generated_configs/variants.yml | 9 +- .evergreen/run-mongodb-oidc-remote-test.sh | 60 ++++++++++++ .evergreen/run-mongodb-oidc-test.sh | 3 + .evergreen/scripts/generate_config.py | 8 +- pymongo/auth_oidc_shared.py | 14 +++ pymongo/auth_shared.py | 4 + test/auth/legacy/connection-string.json | 20 ++++ test/auth_oidc/test_auth_oidc.py | 48 ++------- test/unified_format_shared.py | 2 + 10 files changed, 190 insertions(+), 85 deletions(-) create mode 100755 .evergreen/run-mongodb-oidc-remote-test.sh diff --git a/.evergreen/config.yml b/.evergreen/config.yml index fda6864317..9de7a85e26 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -520,6 +520,18 @@ functions: args: - .evergreen/run-mongodb-oidc-test.sh + "run oidc k8s auth test": + - command: subprocess.exec + type: test + params: + binary: bash + working_dir: src + env: + OIDC_ENV: k8s + include_expansions_in_env: ["DRIVERS_TOOLS", "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN", "K8S_VARIANT"] + args: + - ${PROJECT_DIRECTORY}/.evergreen/run-mongodb-oidc-remote-test.sh + "run aws auth test with aws credentials as environment variables": - command: shell.exec type: test @@ -873,6 +885,32 @@ task_groups: tasks: - oidc-auth-test-gcp + - name: testk8soidc_task_group + setup_group: + - func: fetch source + - func: prepare resources + - func: fix absolute paths + - func: make files executable + - command: ec2.assume_role + params: + role_arn: ${aws_test_secrets_role} + duration_seconds: 1800 + - command: subprocess.exec + params: + binary: bash + args: + - ${DRIVERS_TOOLS}/.evergreen/auth_oidc/k8s/setup.sh + teardown_task: + - command: subprocess.exec + params: + binary: bash + args: + - ${DRIVERS_TOOLS}/.evergreen/auth_oidc/k8s/teardown.sh + setup_group_can_fail_task: true + setup_group_timeout_secs: 1800 + tasks: + - oidc-auth-test-k8s + - name: testoidc_task_group setup_group: - func: fetch source @@ -1548,40 +1586,41 @@ tasks: - name: "oidc-auth-test-azure" commands: - - command: shell.exec + - command: subprocess.exec type: test params: - shell: bash - script: |- - set -o errexit - . src/.evergreen/scripts/env.sh - cd src - git add . - git commit -m "add files" - export AZUREOIDC_DRIVERS_TAR_FILE=/tmp/mongo-python-driver.tgz - git archive -o $AZUREOIDC_DRIVERS_TAR_FILE HEAD - export AZUREOIDC_TEST_CMD="OIDC_ENV=azure ./.evergreen/run-mongodb-oidc-test.sh" - bash $DRIVERS_TOOLS/.evergreen/auth_oidc/azure/run-driver-test.sh + binary: bash + working_dir: src + env: + OIDC_ENV: azure + include_expansions_in_env: ["DRIVERS_TOOLS"] + args: + - ${PROJECT_DIRECTORY}/.evergreen/run-mongodb-oidc-remote-test.sh - name: "oidc-auth-test-gcp" commands: - - command: shell.exec + - command: subprocess.exec type: test params: - shell: bash - script: |- - set -o errexit - . src/.evergreen/scripts/env.sh - cd src - git add . - git commit -m "add files" - export GCPOIDC_DRIVERS_TAR_FILE=/tmp/mongo-python-driver.tgz - git archive -o $GCPOIDC_DRIVERS_TAR_FILE HEAD - # Define the command to run on the VM. - # Ensure that we source the environment file created for us, set up any other variables we need, - # and then run our test suite on the vm. 
- export GCPOIDC_TEST_CMD="OIDC_ENV=gcp ./.evergreen/run-mongodb-oidc-test.sh" - bash $DRIVERS_TOOLS/.evergreen/auth_oidc/gcp/run-driver-test.sh + binary: bash + working_dir: src + env: + OIDC_ENV: gcp + include_expansions_in_env: ["DRIVERS_TOOLS"] + args: + - ${PROJECT_DIRECTORY}/.evergreen/run-mongodb-oidc-remote-test.sh + + - name: "oidc-auth-test-k8s" + commands: + - func: "run oidc k8s auth test" + vars: + K8S_VARIANT: eks + - func: "run oidc k8s auth test" + vars: + K8S_VARIANT: gke + - func: "run oidc k8s auth test" + vars: + K8S_VARIANT: aks # }}} - name: "coverage-report" tags: ["coverage"] @@ -1740,20 +1779,6 @@ buildvariants: tasks: - name: "coverage-report" -- name: testazureoidc-variant - display_name: "OIDC Auth Azure" - run_on: ubuntu2204-small - tasks: - - name: testazureoidc_task_group - batchtime: 20160 # Use a batchtime of 14 days as suggested by the CSFLE test README - -- name: testgcpoidc-variant - display_name: "OIDC Auth GCP" - run_on: ubuntu2204-small - tasks: - - name: testgcpoidc_task_group - batchtime: 20160 # Use a batchtime of 14 days as suggested by the CSFLE test README - - name: testgcpkms-variant display_name: "GCP KMS" run_on: diff --git a/.evergreen/generated_configs/variants.yml b/.evergreen/generated_configs/variants.yml index 0a4e5cfb14..c2a9a70016 100644 --- a/.evergreen/generated_configs/variants.yml +++ b/.evergreen/generated_configs/variants.yml @@ -955,12 +955,15 @@ buildvariants: VERSION: "8.0" # Oidc auth tests - - name: oidc-auth-rhel8 + - name: oidc-auth-ubuntu-22 tasks: - name: testoidc_task_group - display_name: OIDC Auth RHEL8 + - name: testazureoidc_task_group + - name: testgcpoidc_task_group + - name: testk8soidc_task_group + display_name: OIDC Auth Ubuntu-22 run_on: - - rhel87-small + - ubuntu2204-small batchtime: 20160 - name: oidc-auth-macos tasks: diff --git a/.evergreen/run-mongodb-oidc-remote-test.sh b/.evergreen/run-mongodb-oidc-remote-test.sh new file mode 100755 index 0000000000..bb90bddf07 --- /dev/null +++ b/.evergreen/run-mongodb-oidc-remote-test.sh @@ -0,0 +1,60 @@ +#!/bin/bash + +set +x # Disable debug trace +set -eu + +echo "Running MONGODB-OIDC remote tests" + +OIDC_ENV=${OIDC_ENV:-"test"} + +# Make sure DRIVERS_TOOLS is set. +if [ -z "$DRIVERS_TOOLS" ]; then + echo "Must specify DRIVERS_TOOLS" + exit 1 +fi + +# Set up the remote files to test. +git add . +git commit -m "add files" || true +export TEST_TAR_FILE=/tmp/mongo-python-driver.tgz +git archive -o $TEST_TAR_FILE HEAD + +pushd $DRIVERS_TOOLS + +if [ $OIDC_ENV == "test" ]; then + echo "Test OIDC environment does not support remote test!" + exit 1 + +elif [ $OIDC_ENV == "azure" ]; then + export AZUREOIDC_DRIVERS_TAR_FILE=$TEST_TAR_FILE + export AZUREOIDC_TEST_CMD="OIDC_ENV=azure ./.evergreen/run-mongodb-oidc-test.sh" + bash ./.evergreen/auth_oidc/azure/run-driver-test.sh + +elif [ $OIDC_ENV == "gcp" ]; then + export GCPOIDC_DRIVERS_TAR_FILE=$TEST_TAR_FILE + export GCPOIDC_TEST_CMD="OIDC_ENV=gcp ./.evergreen/run-mongodb-oidc-test.sh" + bash ./.evergreen/auth_oidc/gcp/run-driver-test.sh + +elif [ $OIDC_ENV == "k8s" ]; then + # Make sure K8S_VARIANT is set. 
+ if [ -z "$K8S_VARIANT" ]; then + echo "Must specify K8S_VARIANT" + popd + exit 1 + fi + + bash ./.evergreen/auth_oidc/k8s/setup-pod.sh + bash ./.evergreen/auth_oidc/k8s/run-self-test.sh + export K8S_DRIVERS_TAR_FILE=$TEST_TAR_FILE + export K8S_TEST_CMD="OIDC_ENV=k8s ./.evergreen/run-mongodb-oidc-test.sh" + source ./.evergreen/auth_oidc/k8s/secrets-export.sh # for MONGODB_URI + bash ./.evergreen/auth_oidc/k8s/run-driver-test.sh + bash ./.evergreen/auth_oidc/k8s/teardown-pod.sh + +else + echo "Unrecognized OIDC_ENV $OIDC_ENV" + pod + exit 1 +fi + +popd diff --git a/.evergreen/run-mongodb-oidc-test.sh b/.evergreen/run-mongodb-oidc-test.sh index 0c34912c8a..22864528c0 100755 --- a/.evergreen/run-mongodb-oidc-test.sh +++ b/.evergreen/run-mongodb-oidc-test.sh @@ -21,6 +21,9 @@ elif [ $OIDC_ENV == "azure" ]; then elif [ $OIDC_ENV == "gcp" ]; then source ./secrets-export.sh +elif [ $OIDC_ENV == "k8s" ]; then + echo "Running oidc on k8s" + else echo "Unrecognized OIDC_ENV $OIDC_ENV" exit 1 diff --git a/.evergreen/scripts/generate_config.py b/.evergreen/scripts/generate_config.py index 9abcc6516a..dfaad0f835 100644 --- a/.evergreen/scripts/generate_config.py +++ b/.evergreen/scripts/generate_config.py @@ -615,10 +615,14 @@ def create_serverless_variants(): def create_oidc_auth_variants(): variants = [] - for host in ["rhel8", "macos", "win64"]: + other_tasks = ["testazureoidc_task_group", "testgcpoidc_task_group", "testk8soidc_task_group"] + for host in ["ubuntu22", "macos", "win64"]: + tasks = ["testoidc_task_group"] + if host == "ubuntu22": + tasks += other_tasks variants.append( create_variant( - ["testoidc_task_group"], + tasks, get_display_name("OIDC Auth", host), host=host, batchtime=BATCHTIME_WEEK * 2, diff --git a/pymongo/auth_oidc_shared.py b/pymongo/auth_oidc_shared.py index 5e3603fa31..9e0acaf6c8 100644 --- a/pymongo/auth_oidc_shared.py +++ b/pymongo/auth_oidc_shared.py @@ -116,3 +116,17 @@ def __init__(self, token_resource: str) -> None: def fetch(self, context: OIDCCallbackContext) -> OIDCCallbackResult: resp = _get_gcp_response(self.token_resource, context.timeout_seconds) return OIDCCallbackResult(access_token=resp["access_token"]) + + +class _OIDCK8SCallback(OIDCCallback): + def fetch(self, context: OIDCCallbackContext) -> OIDCCallbackResult: + return OIDCCallbackResult(access_token=_get_k8s_token()) + + +def _get_k8s_token() -> str: + fname = "/var/run/secrets/kubernetes.io/serviceaccount/token" + for key in ["AZURE_FEDERATED_TOKEN_FILE", "AWS_WEB_IDENTITY_TOKEN_FILE"]: + if key in os.environ: + fname = os.environ[key] + with open(fname) as fid: + return fid.read() diff --git a/pymongo/auth_shared.py b/pymongo/auth_shared.py index fa25aa3faa..1e1ce7b4d8 100644 --- a/pymongo/auth_shared.py +++ b/pymongo/auth_shared.py @@ -26,6 +26,7 @@ from pymongo.auth_oidc_shared import ( _OIDCAzureCallback, _OIDCGCPCallback, + _OIDCK8SCallback, _OIDCProperties, _OIDCTestCallback, ) @@ -192,6 +193,9 @@ def _build_credentials_tuple( "GCP provider for MONGODB-OIDC requires a TOKEN_RESOURCE auth mechanism property" ) callback = _OIDCGCPCallback(token_resource) + elif environ == "k8s": + passwd = None + callback = _OIDCK8SCallback() else: raise ConfigurationError(f"unrecognized ENVIRONMENT for MONGODB-OIDC: {environ}") else: diff --git a/test/auth/legacy/connection-string.json b/test/auth/legacy/connection-string.json index 67aafbff6e..3a099c8137 100644 --- a/test/auth/legacy/connection-string.json +++ b/test/auth/legacy/connection-string.json @@ -626,6 +626,26 @@ "uri": 
"mongodb://user:pass@localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:gcp", "valid": false, "credential": null + }, + { + "description": "should recognise the mechanism with k8s provider (MONGODB-OIDC)", + "uri": "mongodb://localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:k8s", + "valid": true, + "credential": { + "username": null, + "password": null, + "source": "$external", + "mechanism": "MONGODB-OIDC", + "mechanism_properties": { + "ENVIRONMENT": "k8s" + } + } + }, + { + "description": "should throw an error for a username and password with k8s provider (MONGODB-OIDC)", + "uri": "mongodb://user:pass@localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:k8s", + "valid": false, + "credential": null } ] } diff --git a/test/auth_oidc/test_auth_oidc.py b/test/auth_oidc/test_auth_oidc.py index 6526391daf..a0127304c1 100644 --- a/test/auth_oidc/test_auth_oidc.py +++ b/test/auth_oidc/test_auth_oidc.py @@ -37,6 +37,7 @@ from pymongo import MongoClient from pymongo._azure_helpers import _get_azure_response from pymongo._gcp_helpers import _get_gcp_response +from pymongo.auth_oidc_shared import _get_k8s_token from pymongo.cursor_shared import CursorType from pymongo.errors import AutoReconnect, ConfigurationError, OperationFailure from pymongo.hello import HelloCompat @@ -84,6 +85,10 @@ def get_token(self, username=None): opts = parse_uri(self.uri_single)["options"] token_aud = opts["authmechanismproperties"]["TOKEN_RESOURCE"] return _get_gcp_response(token_aud, username)["access_token"] + elif ENVIRON == "k8s": + return _get_k8s_token() + else: + raise ValueError(f"Unknown ENVIRON: {ENVIRON}") @contextmanager def fail_point(self, command_args): @@ -758,7 +763,9 @@ def create_client(self, *args, **kwargs): kwargs["retryReads"] = False if not len(args): args = [self.uri_single] - return MongoClient(*args, authmechanismproperties=props, **kwargs) + client = MongoClient(*args, authmechanismproperties=props, **kwargs) + self.addCleanup(client.close) + return client def test_1_1_callback_is_called_during_reauthentication(self): # Create a ``MongoClient`` configured with a custom OIDC callback that @@ -768,8 +775,6 @@ def test_1_1_callback_is_called_during_reauthentication(self): client.test.test.find_one() # Assert that the callback was called 1 time. self.assertEqual(self.request_called, 1) - # Close the client. - client.close() def test_1_2_callback_is_called_once_for_multiple_connections(self): # Create a ``MongoClient`` configured with a custom OIDC callback that @@ -790,8 +795,6 @@ def target(): thread.join() # Assert that the callback was called 1 time. self.assertEqual(self.request_called, 1) - # Close the client. - client.close() def test_2_1_valid_callback_inputs(self): # Create a MongoClient configured with an OIDC callback that validates its inputs and returns a valid access token. @@ -800,8 +803,6 @@ def test_2_1_valid_callback_inputs(self): client.test.test.find_one() # Assert that the OIDC callback was called with the appropriate inputs, including the timeout parameter if possible. Ensure that there are no unexpected fields. self.assertEqual(self.request_called, 1) - # Close the client. - client.close() def test_2_2_oidc_callback_returns_null(self): # Create a MongoClient configured with an OIDC callback that returns null. @@ -813,8 +814,6 @@ def fetch(self, a): # Perform a find operation that fails. with self.assertRaises(ValueError): client.test.test.find_one() - # Close the client. 
- client.close() def test_2_3_oidc_callback_returns_missing_data(self): # Create a MongoClient configured with an OIDC callback that returns data not conforming to the OIDCCredential with missing fields. @@ -829,8 +828,6 @@ def fetch(self, a): # Perform a find operation that fails. with self.assertRaises(ValueError): client.test.test.find_one() - # Close the client. - client.close() def test_2_4_invalid_client_configuration_with_callback(self): # Create a MongoClient configured with an OIDC callback and auth mechanism property ENVIRONMENT:test. @@ -870,8 +867,6 @@ def test_3_1_authentication_failure_with_cached_tokens_fetch_a_new_token_and_ret client.test.test.find_one() # Verify that the callback was called 1 time. self.assertEqual(self.request_called, 1) - # Close the client. - client.close() def test_3_2_authentication_failures_without_cached_tokens_returns_an_error(self): # Create a MongoClient configured with retryReads=false and an OIDC callback that always returns invalid access tokens. @@ -889,8 +884,6 @@ def fetch(self, a): client.test.test.find_one() # Verify that the callback was called 1 time. self.assertEqual(callback.count, 1) - # Close the client. - client.close() def test_3_3_unexpected_error_code_does_not_clear_cache(self): # Create a ``MongoClient`` with a human callback that returns a valid token @@ -916,9 +909,6 @@ def test_3_3_unexpected_error_code_does_not_clear_cache(self): # Assert that the callback has been called once. self.assertEqual(self.request_called, 1) - # Close the client. - client.close() - def test_4_1_reauthentication_succeds(self): # Create a ``MongoClient`` configured with a custom OIDC callback that # implements the provider logic. @@ -938,9 +928,6 @@ def test_4_1_reauthentication_succeds(self): # handshake, and again during reauthentication). self.assertEqual(self.request_called, 2) - # Close the client. - client.close() - def test_4_2_read_commands_fail_if_reauthentication_fails(self): # Create a ``MongoClient`` whose OIDC callback returns one good token and then # bad tokens after the first call. @@ -977,9 +964,6 @@ def fetch(self, _): # Verify that the callback was called 2 times. self.assertEqual(callback.count, 2) - # Close the client. - client.close() - def test_4_3_write_commands_fail_if_reauthentication_fails(self): # Create a ``MongoClient`` whose OIDC callback returns one good token and then # bad token after the first call. @@ -1016,12 +1000,9 @@ def fetch(self, _): # Verify that the callback was called 2 times. self.assertEqual(callback.count, 2) - # Close the client. - client.close() - def test_4_4_speculative_authentication_should_be_ignored_on_reauthentication(self): # Create an OIDC configured client that can listen for `SaslStart` commands. - listener = OvertCommandListener() + listener = EventListener() client = self.create_client(event_listeners=[listener]) # Preload the *Client Cache* with a valid access token to enforce Speculative Authentication. @@ -1061,9 +1042,6 @@ def test_4_4_speculative_authentication_should_be_ignored_on_reauthentication(se # Assert there were `SaslStart` commands executed. assert any(event.command_name.lower() == "saslstart" for event in listener.started_events) - # Close the client. 
- client.close() - def test_5_1_azure_with_no_username(self): if ENVIRON != "azure": raise unittest.SkipTest("Test is only supported on Azure") @@ -1073,7 +1051,6 @@ def test_5_1_azure_with_no_username(self): props = dict(TOKEN_RESOURCE=resource, ENVIRONMENT="azure") client = self.create_client(authMechanismProperties=props) client.test.test.find_one() - client.close() def test_5_2_azure_with_bad_username(self): if ENVIRON != "azure": @@ -1086,7 +1063,6 @@ def test_5_2_azure_with_bad_username(self): client = self.create_client(username="bad", authmechanismproperties=props) with self.assertRaises(ValueError): client.test.test.find_one() - client.close() def test_speculative_auth_success(self): client1 = self.create_client() @@ -1108,10 +1084,6 @@ def test_speculative_auth_success(self): # Perform a find operation. client2.test.test.find_one() - # Close the clients. - client2.close() - client1.close() - def test_reauthentication_succeeds_multiple_connections(self): client1 = self.create_client() client2 = self.create_client() @@ -1151,8 +1123,6 @@ def test_reauthentication_succeeds_multiple_connections(self): client2.test.test.find_one() self.assertEqual(self.request_called, 3) - client1.close() - client2.close() if __name__ == "__main__": diff --git a/test/unified_format_shared.py b/test/unified_format_shared.py index f315a77f48..1c87fb3f18 100644 --- a/test/unified_format_shared.py +++ b/test/unified_format_shared.py @@ -137,6 +137,8 @@ "ENVIRONMENT": "gcp", "TOKEN_RESOURCE": os.environ["GCPOIDC_AUDIENCE"], } +elif OIDC_ENV == "k8s": + PLACEHOLDER_MAP["/uriOptions/authMechanismProperties"] = {"ENVIRONMENT": "k8s"} def with_metaclass(meta, *bases): From 81bef719339888401d6803a1fd0331376495bff7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 4 Nov 2024 11:41:09 -0600 Subject: [PATCH 092/182] Bump pyright from 1.1.384 to 1.1.385 (#1954) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Jib Co-authored-by: Steven Silvester --- requirements/typing.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/typing.txt b/requirements/typing.txt index db0825c2b1..b1f07604dc 100644 --- a/requirements/typing.txt +++ b/requirements/typing.txt @@ -1,5 +1,5 @@ mypy==1.13.0 -pyright==1.1.384 +pyright==1.1.385 typing_extensions -r ./encryption.txt -r ./ocsp.txt From 91d0d897c0843a7c9939815f3e4628541c49de7b Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Mon, 4 Nov 2024 11:46:37 -0600 Subject: [PATCH 093/182] PYTHON-4943 Clean up EVG Variant Display Names (#1994) --- .evergreen/config.yml | 17 +- .evergreen/generated_configs/variants.yml | 290 ++++++++++++++-------- .evergreen/scripts/generate_config.py | 33 +-- 3 files changed, 216 insertions(+), 124 deletions(-) diff --git a/.evergreen/config.yml b/.evergreen/config.yml index 9de7a85e26..fc1713a88e 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -1766,7 +1766,7 @@ tasks: buildvariants: - name: "no-server" - display_name: "No server test" + display_name: "No server" run_on: - rhel84-small tasks: @@ -1779,31 +1779,26 @@ buildvariants: tasks: - name: "coverage-report" -- name: testgcpkms-variant - display_name: "GCP KMS" +- name: testkms-variant + display_name: "KMS" run_on: - debian11-small tasks: - name: testgcpkms_task_group batchtime: 20160 # Use a batchtime of 14 days as suggested by the CSFLE test README - testgcpkms-fail-task - -- name: testazurekms-variant - 
display_name: "Azure KMS" - run_on: debian11-small - tasks: - name: testazurekms_task_group batchtime: 20160 # Use a batchtime of 14 days as suggested by the CSFLE test README - testazurekms-fail-task - name: rhel8-test-lambda - display_name: AWS Lambda handler tests + display_name: FaaS Lambda run_on: rhel87-small tasks: - name: test_aws_lambda_task_group - name: rhel8-import-time - display_name: Import Time Check + display_name: Import Time run_on: rhel87-small tasks: - name: "check-import-time" @@ -1816,7 +1811,7 @@ buildvariants: - name: "backport-pr" - name: "perf-tests" - display_name: "Performance Benchmark Tests" + display_name: "Performance Benchmarks" batchtime: 10080 # 7 days run_on: rhel90-dbx-perf-large tasks: diff --git a/.evergreen/generated_configs/variants.yml b/.evergreen/generated_configs/variants.yml index c2a9a70016..9ee51899f4 100644 --- a/.evergreen/generated_configs/variants.yml +++ b/.evergreen/generated_configs/variants.yml @@ -106,7 +106,7 @@ buildvariants: PYTHON_BINARY: /opt/python/3.13/bin/python3 # Aws auth tests - - name: aws-auth-ubuntu-20-py3.9 + - name: auth-aws-ubuntu-20-py3.9 tasks: - name: aws-auth-test-4.4 - name: aws-auth-test-5.0 @@ -115,12 +115,12 @@ buildvariants: - name: aws-auth-test-8.0 - name: aws-auth-test-rapid - name: aws-auth-test-latest - display_name: AWS Auth Ubuntu-20 py3.9 + display_name: Auth AWS Ubuntu-20 py3.9 run_on: - ubuntu2004-small expansions: PYTHON_BINARY: /opt/python/3.9/bin/python3 - - name: aws-auth-ubuntu-20-py3.13 + - name: auth-aws-ubuntu-20-py3.13 tasks: - name: aws-auth-test-4.4 - name: aws-auth-test-5.0 @@ -129,12 +129,12 @@ buildvariants: - name: aws-auth-test-8.0 - name: aws-auth-test-rapid - name: aws-auth-test-latest - display_name: AWS Auth Ubuntu-20 py3.13 + display_name: Auth AWS Ubuntu-20 py3.13 run_on: - ubuntu2004-small expansions: PYTHON_BINARY: /opt/python/3.13/bin/python3 - - name: aws-auth-win64-py3.9 + - name: auth-aws-win64-py3.9 tasks: - name: aws-auth-test-4.4 - name: aws-auth-test-5.0 @@ -143,13 +143,13 @@ buildvariants: - name: aws-auth-test-8.0 - name: aws-auth-test-rapid - name: aws-auth-test-latest - display_name: AWS Auth Win64 py3.9 + display_name: Auth AWS Win64 py3.9 run_on: - windows-64-vsMulti-small expansions: skip_ECS_auth_test: "true" PYTHON_BINARY: C:/python/Python39/python.exe - - name: aws-auth-win64-py3.13 + - name: auth-aws-win64-py3.13 tasks: - name: aws-auth-test-4.4 - name: aws-auth-test-5.0 @@ -158,13 +158,13 @@ buildvariants: - name: aws-auth-test-8.0 - name: aws-auth-test-rapid - name: aws-auth-test-latest - display_name: AWS Auth Win64 py3.13 + display_name: Auth AWS Win64 py3.13 run_on: - windows-64-vsMulti-small expansions: skip_ECS_auth_test: "true" PYTHON_BINARY: C:/python/Python313/python.exe - - name: aws-auth-macos-py3.9 + - name: auth-aws-macos-py3.9 tasks: - name: aws-auth-test-4.4 - name: aws-auth-test-5.0 @@ -173,7 +173,7 @@ buildvariants: - name: aws-auth-test-8.0 - name: aws-auth-test-rapid - name: aws-auth-test-latest - display_name: AWS Auth macOS py3.9 + display_name: Auth AWS macOS py3.9 run_on: - macos-14 expansions: @@ -181,7 +181,7 @@ buildvariants: skip_EC2_auth_test: "true" skip_web_identity_auth_test: "true" PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 - - name: aws-auth-macos-py3.13 + - name: auth-aws-macos-py3.13 tasks: - name: aws-auth-test-4.4 - name: aws-auth-test-5.0 @@ -190,7 +190,7 @@ buildvariants: - name: aws-auth-test-8.0 - name: aws-auth-test-rapid - name: aws-auth-test-latest - display_name: AWS Auth macOS 
py3.13 + display_name: Auth AWS macOS py3.13 run_on: - macos-14 expansions: @@ -200,85 +200,85 @@ buildvariants: PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 # Compression tests - - name: snappy-compression-rhel8-py3.9-no-c + - name: compression-snappy-rhel8-py3.9-no-c tasks: - name: .standalone .noauth .nossl .sync_async - display_name: snappy compression RHEL8 py3.9 No C + display_name: Compression snappy RHEL8 py3.9 No C run_on: - rhel87-small expansions: COMPRESSORS: snappy NO_EXT: "1" PYTHON_BINARY: /opt/python/3.9/bin/python3 - - name: snappy-compression-rhel8-py3.10 + - name: compression-snappy-rhel8-py3.10 tasks: - name: .standalone .noauth .nossl .sync_async - display_name: snappy compression RHEL8 py3.10 + display_name: Compression snappy RHEL8 py3.10 run_on: - rhel87-small expansions: COMPRESSORS: snappy PYTHON_BINARY: /opt/python/3.10/bin/python3 - - name: zlib-compression-rhel8-py3.11-no-c + - name: compression-zlib-rhel8-py3.11-no-c tasks: - name: .standalone .noauth .nossl .sync_async - display_name: zlib compression RHEL8 py3.11 No C + display_name: Compression zlib RHEL8 py3.11 No C run_on: - rhel87-small expansions: COMPRESSORS: zlib NO_EXT: "1" PYTHON_BINARY: /opt/python/3.11/bin/python3 - - name: zlib-compression-rhel8-py3.12 + - name: compression-zlib-rhel8-py3.12 tasks: - name: .standalone .noauth .nossl .sync_async - display_name: zlib compression RHEL8 py3.12 + display_name: Compression zlib RHEL8 py3.12 run_on: - rhel87-small expansions: COMPRESSORS: zlib PYTHON_BINARY: /opt/python/3.12/bin/python3 - - name: zstd-compression-rhel8-py3.13-no-c + - name: compression-zstd-rhel8-py3.13-no-c tasks: - name: .standalone .noauth .nossl .sync_async !.4.0 - display_name: zstd compression RHEL8 py3.13 No C + display_name: Compression zstd RHEL8 py3.13 No C run_on: - rhel87-small expansions: COMPRESSORS: zstd NO_EXT: "1" PYTHON_BINARY: /opt/python/3.13/bin/python3 - - name: zstd-compression-rhel8-py3.9 + - name: compression-zstd-rhel8-py3.9 tasks: - name: .standalone .noauth .nossl .sync_async !.4.0 - display_name: zstd compression RHEL8 py3.9 + display_name: Compression zstd RHEL8 py3.9 run_on: - rhel87-small expansions: COMPRESSORS: zstd PYTHON_BINARY: /opt/python/3.9/bin/python3 - - name: snappy-compression-rhel8-pypy3.9 + - name: compression-snappy-rhel8-pypy3.9 tasks: - name: .standalone .noauth .nossl .sync_async - display_name: snappy compression RHEL8 pypy3.9 + display_name: Compression snappy RHEL8 pypy3.9 run_on: - rhel87-small expansions: COMPRESSORS: snappy PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 - - name: zlib-compression-rhel8-pypy3.10 + - name: compression-zlib-rhel8-pypy3.10 tasks: - name: .standalone .noauth .nossl .sync_async - display_name: zlib compression RHEL8 pypy3.10 + display_name: Compression zlib RHEL8 pypy3.10 run_on: - rhel87-small expansions: COMPRESSORS: zlib PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 - - name: zstd-compression-rhel8-pypy3.9 + - name: compression-zstd-rhel8-pypy3.9 tasks: - name: .standalone .noauth .nossl .sync_async !.4.0 - display_name: zstd compression RHEL8 pypy3.9 + display_name: Compression zstd RHEL8 pypy3.9 run_on: - rhel87-small expansions: @@ -564,64 +564,64 @@ buildvariants: tags: [encryption_tag] # Enterprise auth tests - - name: enterprise-auth-macos-py3.9-auth + - name: auth-enterprise-macos-py3.9-auth tasks: - name: test-enterprise-auth - display_name: Enterprise Auth macOS py3.9 Auth + display_name: Auth Enterprise macOS py3.9 Auth run_on: - macos-14 expansions: AUTH: 
auth PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 - - name: enterprise-auth-rhel8-py3.10-auth + - name: auth-enterprise-rhel8-py3.10-auth tasks: - name: test-enterprise-auth - display_name: Enterprise Auth RHEL8 py3.10 Auth + display_name: Auth Enterprise RHEL8 py3.10 Auth run_on: - rhel87-small expansions: AUTH: auth PYTHON_BINARY: /opt/python/3.10/bin/python3 - - name: enterprise-auth-rhel8-py3.11-auth + - name: auth-enterprise-rhel8-py3.11-auth tasks: - name: test-enterprise-auth - display_name: Enterprise Auth RHEL8 py3.11 Auth + display_name: Auth Enterprise RHEL8 py3.11 Auth run_on: - rhel87-small expansions: AUTH: auth PYTHON_BINARY: /opt/python/3.11/bin/python3 - - name: enterprise-auth-rhel8-py3.12-auth + - name: auth-enterprise-rhel8-py3.12-auth tasks: - name: test-enterprise-auth - display_name: Enterprise Auth RHEL8 py3.12 Auth + display_name: Auth Enterprise RHEL8 py3.12 Auth run_on: - rhel87-small expansions: AUTH: auth PYTHON_BINARY: /opt/python/3.12/bin/python3 - - name: enterprise-auth-win64-py3.13-auth + - name: auth-enterprise-win64-py3.13-auth tasks: - name: test-enterprise-auth - display_name: Enterprise Auth Win64 py3.13 Auth + display_name: Auth Enterprise Win64 py3.13 Auth run_on: - windows-64-vsMulti-small expansions: AUTH: auth PYTHON_BINARY: C:/python/Python313/python.exe - - name: enterprise-auth-rhel8-pypy3.9-auth + - name: auth-enterprise-rhel8-pypy3.9-auth tasks: - name: test-enterprise-auth - display_name: Enterprise Auth RHEL8 pypy3.9 Auth + display_name: Auth Enterprise RHEL8 pypy3.9 Auth run_on: - rhel87-small expansions: AUTH: auth PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 - - name: enterprise-auth-rhel8-pypy3.10-auth + - name: auth-enterprise-rhel8-pypy3.10-auth tasks: - name: test-enterprise-auth - display_name: Enterprise Auth RHEL8 pypy3.10 Auth + display_name: Auth Enterprise RHEL8 pypy3.10 Auth run_on: - rhel87-small expansions: @@ -629,10 +629,10 @@ buildvariants: PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 # Green framework tests - - name: eventlet-rhel8-py3.9 + - name: green-eventlet-rhel8-py3.9 tasks: - name: .standalone .noauth .nossl .sync_async - display_name: Eventlet RHEL8 py3.9 + display_name: Green Eventlet RHEL8 py3.9 run_on: - rhel87-small expansions: @@ -640,10 +640,10 @@ buildvariants: AUTH: auth SSL: ssl PYTHON_BINARY: /opt/python/3.9/bin/python3 - - name: gevent-rhel8-py3.9 + - name: green-gevent-rhel8-py3.9 tasks: - name: .standalone .noauth .nossl .sync_async - display_name: Gevent RHEL8 py3.9 + display_name: Green Gevent RHEL8 py3.9 run_on: - rhel87-small expansions: @@ -651,10 +651,10 @@ buildvariants: AUTH: auth SSL: ssl PYTHON_BINARY: /opt/python/3.9/bin/python3 - - name: eventlet-rhel8-py3.12 + - name: green-eventlet-rhel8-py3.12 tasks: - name: .standalone .noauth .nossl .sync_async - display_name: Eventlet RHEL8 py3.12 + display_name: Green Eventlet RHEL8 py3.12 run_on: - rhel87-small expansions: @@ -662,10 +662,10 @@ buildvariants: AUTH: auth SSL: ssl PYTHON_BINARY: /opt/python/3.12/bin/python3 - - name: gevent-rhel8-py3.12 + - name: green-gevent-rhel8-py3.12 tasks: - name: .standalone .noauth .nossl .sync_async - display_name: Gevent RHEL8 py3.12 + display_name: Green Gevent RHEL8 py3.12 run_on: - rhel87-small expansions: @@ -727,10 +727,10 @@ buildvariants: VERSION: latest # Mockupdb tests - - name: mockupdb-tests-rhel8-py3.9 + - name: mockupdb-rhel8-py3.9 tasks: - name: mockupdb - display_name: MockupDB Tests RHEL8 py3.9 + display_name: MockupDB RHEL8 py3.9 run_on: - rhel87-small 
expansions: @@ -810,10 +810,10 @@ buildvariants: PYTHON_BINARY: /opt/python/3.13/bin/python3 # Ocsp tests - - name: ocsp-test-rhel8-v4.4-py3.9 + - name: ocsp-rhel8-v4.4-py3.9 tasks: - name: .ocsp - display_name: OCSP test RHEL8 v4.4 py3.9 + display_name: OCSP RHEL8 v4.4 py3.9 run_on: - rhel87-small batchtime: 20160 @@ -823,10 +823,10 @@ buildvariants: TOPOLOGY: server PYTHON_BINARY: /opt/python/3.9/bin/python3 VERSION: "4.4" - - name: ocsp-test-rhel8-v5.0-py3.10 + - name: ocsp-rhel8-v5.0-py3.10 tasks: - name: .ocsp - display_name: OCSP test RHEL8 v5.0 py3.10 + display_name: OCSP RHEL8 v5.0 py3.10 run_on: - rhel87-small batchtime: 20160 @@ -836,10 +836,10 @@ buildvariants: TOPOLOGY: server PYTHON_BINARY: /opt/python/3.10/bin/python3 VERSION: "5.0" - - name: ocsp-test-rhel8-v6.0-py3.11 + - name: ocsp-rhel8-v6.0-py3.11 tasks: - name: .ocsp - display_name: OCSP test RHEL8 v6.0 py3.11 + display_name: OCSP RHEL8 v6.0 py3.11 run_on: - rhel87-small batchtime: 20160 @@ -849,10 +849,10 @@ buildvariants: TOPOLOGY: server PYTHON_BINARY: /opt/python/3.11/bin/python3 VERSION: "6.0" - - name: ocsp-test-rhel8-v7.0-py3.12 + - name: ocsp-rhel8-v7.0-py3.12 tasks: - name: .ocsp - display_name: OCSP test RHEL8 v7.0 py3.12 + display_name: OCSP RHEL8 v7.0 py3.12 run_on: - rhel87-small batchtime: 20160 @@ -862,10 +862,10 @@ buildvariants: TOPOLOGY: server PYTHON_BINARY: /opt/python/3.12/bin/python3 VERSION: "7.0" - - name: ocsp-test-rhel8-v8.0-py3.13 + - name: ocsp-rhel8-v8.0-py3.13 tasks: - name: .ocsp - display_name: OCSP test RHEL8 v8.0 py3.13 + display_name: OCSP RHEL8 v8.0 py3.13 run_on: - rhel87-small batchtime: 20160 @@ -875,10 +875,10 @@ buildvariants: TOPOLOGY: server PYTHON_BINARY: /opt/python/3.13/bin/python3 VERSION: "8.0" - - name: ocsp-test-rhel8-rapid-pypy3.9 + - name: ocsp-rhel8-rapid-pypy3.9 tasks: - name: .ocsp - display_name: OCSP test RHEL8 rapid pypy3.9 + display_name: OCSP RHEL8 rapid pypy3.9 run_on: - rhel87-small batchtime: 20160 @@ -888,10 +888,10 @@ buildvariants: TOPOLOGY: server PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 VERSION: rapid - - name: ocsp-test-rhel8-latest-pypy3.10 + - name: ocsp-rhel8-latest-pypy3.10 tasks: - name: .ocsp - display_name: OCSP test RHEL8 latest pypy3.10 + display_name: OCSP RHEL8 latest pypy3.10 run_on: - rhel87-small batchtime: 20160 @@ -901,10 +901,10 @@ buildvariants: TOPOLOGY: server PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 VERSION: latest - - name: ocsp-test-win64-v4.4-py3.9 + - name: ocsp-win64-v4.4-py3.9 tasks: - name: .ocsp-rsa !.ocsp-staple - display_name: OCSP test Win64 v4.4 py3.9 + display_name: OCSP Win64 v4.4 py3.9 run_on: - windows-64-vsMulti-small batchtime: 20160 @@ -914,10 +914,10 @@ buildvariants: TOPOLOGY: server PYTHON_BINARY: C:/python/Python39/python.exe VERSION: "4.4" - - name: ocsp-test-win64-v8.0-py3.13 + - name: ocsp-win64-v8.0-py3.13 tasks: - name: .ocsp-rsa !.ocsp-staple - display_name: OCSP test Win64 v8.0 py3.13 + display_name: OCSP Win64 v8.0 py3.13 run_on: - windows-64-vsMulti-small batchtime: 20160 @@ -927,10 +927,10 @@ buildvariants: TOPOLOGY: server PYTHON_BINARY: C:/python/Python313/python.exe VERSION: "8.0" - - name: ocsp-test-macos-v4.4-py3.9 + - name: ocsp-macos-v4.4-py3.9 tasks: - name: .ocsp-rsa !.ocsp-staple - display_name: OCSP test macOS v4.4 py3.9 + display_name: OCSP macOS v4.4 py3.9 run_on: - macos-14 batchtime: 20160 @@ -940,10 +940,10 @@ buildvariants: TOPOLOGY: server PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 VERSION: "4.4" - - name: ocsp-test-macos-v8.0-py3.13 + 
- name: ocsp-macos-v8.0-py3.13 tasks: - name: .ocsp-rsa !.ocsp-staple - display_name: OCSP test macOS v8.0 py3.13 + display_name: OCSP macOS v8.0 py3.13 run_on: - macos-14 batchtime: 20160 @@ -955,27 +955,27 @@ buildvariants: VERSION: "8.0" # Oidc auth tests - - name: oidc-auth-ubuntu-22 + - name: auth-oidc-ubuntu-22 tasks: - name: testoidc_task_group - name: testazureoidc_task_group - name: testgcpoidc_task_group - name: testk8soidc_task_group - display_name: OIDC Auth Ubuntu-22 + display_name: Auth OIDC Ubuntu-22 run_on: - ubuntu2204-small batchtime: 20160 - - name: oidc-auth-macos + - name: auth-oidc-macos tasks: - name: testoidc_task_group - display_name: OIDC Auth macOS + display_name: Auth OIDC macOS run_on: - macos-14 batchtime: 20160 - - name: oidc-auth-win64 + - name: auth-oidc-win64 tasks: - name: testoidc_task_group - display_name: OIDC Auth Win64 + display_name: Auth OIDC Win64 run_on: - windows-64-vsMulti-small batchtime: 20160 @@ -1075,7 +1075,7 @@ buildvariants: - name: .standalone .sync_async - name: .replica_set .sync_async - name: .sharded_cluster .sync_async - display_name: Test RHEL8 py3.9 cov + display_name: "* Test RHEL8 py3.9 cov" run_on: - rhel87-small expansions: @@ -1087,7 +1087,7 @@ buildvariants: - name: .standalone .sync_async - name: .replica_set .sync_async - name: .sharded_cluster .sync_async - display_name: Test RHEL8 py3.13 cov + display_name: "* Test RHEL8 py3.13 cov" run_on: - rhel87-small expansions: @@ -1099,7 +1099,7 @@ buildvariants: - name: .standalone .sync_async - name: .replica_set .sync_async - name: .sharded_cluster .sync_async - display_name: Test RHEL8 pypy3.10 cov + display_name: "* Test RHEL8 pypy3.10 cov" run_on: - rhel87-small expansions: @@ -1111,7 +1111,7 @@ buildvariants: - name: .sharded_cluster .auth .ssl .sync_async - name: .replica_set .noauth .ssl .sync_async - name: .standalone .noauth .nossl .sync_async - display_name: Test RHEL8 py3.10 + display_name: "* Test RHEL8 py3.10" run_on: - rhel87-small expansions: @@ -1122,7 +1122,7 @@ buildvariants: - name: .sharded_cluster .auth .ssl .sync_async - name: .replica_set .noauth .ssl .sync_async - name: .standalone .noauth .nossl .sync_async - display_name: Test RHEL8 py3.11 + display_name: "* Test RHEL8 py3.11" run_on: - rhel87-small expansions: @@ -1133,7 +1133,7 @@ buildvariants: - name: .sharded_cluster .auth .ssl .sync_async - name: .replica_set .noauth .ssl .sync_async - name: .standalone .noauth .nossl .sync_async - display_name: Test RHEL8 py3.12 + display_name: "* Test RHEL8 py3.12" run_on: - rhel87-small expansions: @@ -1144,7 +1144,7 @@ buildvariants: - name: .sharded_cluster .auth .ssl .sync_async - name: .replica_set .noauth .ssl .sync_async - name: .standalone .noauth .nossl .sync_async - display_name: Test RHEL8 pypy3.9 + display_name: "* Test RHEL8 pypy3.9" run_on: - rhel87-small expansions: @@ -1155,7 +1155,7 @@ buildvariants: - name: .sharded_cluster .auth .ssl !.sync_async - name: .replica_set .noauth .ssl !.sync_async - name: .standalone .noauth .nossl !.sync_async - display_name: Test macOS py3.9 + display_name: "* Test macOS py3.9" run_on: - macos-14 expansions: @@ -1166,7 +1166,7 @@ buildvariants: - name: .sharded_cluster .auth .ssl !.sync_async - name: .replica_set .noauth .ssl !.sync_async - name: .standalone .noauth .nossl !.sync_async - display_name: Test macOS py3.13 + display_name: "* Test macOS py3.13" run_on: - macos-14 expansions: @@ -1189,7 +1189,7 @@ buildvariants: - name: .sharded_cluster .auth .ssl .latest !.sync_async - name: .replica_set .noauth .ssl 
.latest !.sync_async - name: .standalone .noauth .nossl .latest !.sync_async - display_name: Test macOS Arm64 py3.9 + display_name: "* Test macOS Arm64 py3.9" run_on: - macos-14-arm64 expansions: @@ -1212,7 +1212,7 @@ buildvariants: - name: .sharded_cluster .auth .ssl .latest !.sync_async - name: .replica_set .noauth .ssl .latest !.sync_async - name: .standalone .noauth .nossl .latest !.sync_async - display_name: Test macOS Arm64 py3.13 + display_name: "* Test macOS Arm64 py3.13" run_on: - macos-14-arm64 expansions: @@ -1223,7 +1223,7 @@ buildvariants: - name: .sharded_cluster .auth .ssl !.sync_async - name: .replica_set .noauth .ssl !.sync_async - name: .standalone .noauth .nossl !.sync_async - display_name: Test Win64 py3.9 + display_name: "* Test Win64 py3.9" run_on: - windows-64-vsMulti-small expansions: @@ -1234,7 +1234,7 @@ buildvariants: - name: .sharded_cluster .auth .ssl !.sync_async - name: .replica_set .noauth .ssl !.sync_async - name: .standalone .noauth .nossl !.sync_async - display_name: Test Win64 py3.13 + display_name: "* Test Win64 py3.13" run_on: - windows-64-vsMulti-small expansions: @@ -1245,7 +1245,7 @@ buildvariants: - name: .sharded_cluster .auth .ssl !.sync_async - name: .replica_set .noauth .ssl !.sync_async - name: .standalone .noauth .nossl !.sync_async - display_name: Test Win32 py3.9 + display_name: "* Test Win32 py3.9" run_on: - windows-64-vsMulti-small expansions: @@ -1256,7 +1256,7 @@ buildvariants: - name: .sharded_cluster .auth .ssl !.sync_async - name: .replica_set .noauth .ssl !.sync_async - name: .standalone .noauth .nossl !.sync_async - display_name: Test Win32 py3.13 + display_name: "* Test Win32 py3.13" run_on: - windows-64-vsMulti-small expansions: @@ -1288,3 +1288,99 @@ buildvariants: AUTH: auth SSL: ssl PYTHON_BINARY: /opt/python/3.13/bin/python3 + + # Stable api tests + - name: stable-api-require-v1-rhel8-py3.9-auth + tasks: + - name: .standalone .5.0 .noauth .nossl .sync_async + - name: .standalone .6.0 .noauth .nossl .sync_async + - name: .standalone .7.0 .noauth .nossl .sync_async + - name: .standalone .8.0 .noauth .nossl .sync_async + - name: .standalone .rapid .noauth .nossl .sync_async + - name: .standalone .latest .noauth .nossl .sync_async + display_name: Stable API require v1 RHEL8 py3.9 Auth + run_on: + - rhel87-small + expansions: + AUTH: auth + REQUIRE_API_VERSION: "1" + MONGODB_API_VERSION: "1" + PYTHON_BINARY: /opt/python/3.9/bin/python3 + tags: [versionedApi_tag] + - name: stable-api-accept-v2-rhel8-py3.9-auth + tasks: + - name: .standalone .5.0 .noauth .nossl .sync_async + - name: .standalone .6.0 .noauth .nossl .sync_async + - name: .standalone .7.0 .noauth .nossl .sync_async + - name: .standalone .8.0 .noauth .nossl .sync_async + - name: .standalone .rapid .noauth .nossl .sync_async + - name: .standalone .latest .noauth .nossl .sync_async + display_name: Stable API accept v2 RHEL8 py3.9 Auth + run_on: + - rhel87-small + expansions: + AUTH: auth + ORCHESTRATION_FILE: versioned-api-testing.json + PYTHON_BINARY: /opt/python/3.9/bin/python3 + tags: [versionedApi_tag] + - name: stable-api-require-v1-rhel8-py3.13-auth + tasks: + - name: .standalone .5.0 .noauth .nossl .sync_async + - name: .standalone .6.0 .noauth .nossl .sync_async + - name: .standalone .7.0 .noauth .nossl .sync_async + - name: .standalone .8.0 .noauth .nossl .sync_async + - name: .standalone .rapid .noauth .nossl .sync_async + - name: .standalone .latest .noauth .nossl .sync_async + display_name: Stable API require v1 RHEL8 py3.13 Auth + run_on: + - rhel87-small + 
expansions: + AUTH: auth + REQUIRE_API_VERSION: "1" + MONGODB_API_VERSION: "1" + PYTHON_BINARY: /opt/python/3.13/bin/python3 + tags: [versionedApi_tag] + - name: stable-api-accept-v2-rhel8-py3.13-auth + tasks: + - name: .standalone .5.0 .noauth .nossl .sync_async + - name: .standalone .6.0 .noauth .nossl .sync_async + - name: .standalone .7.0 .noauth .nossl .sync_async + - name: .standalone .8.0 .noauth .nossl .sync_async + - name: .standalone .rapid .noauth .nossl .sync_async + - name: .standalone .latest .noauth .nossl .sync_async + display_name: Stable API accept v2 RHEL8 py3.13 Auth + run_on: + - rhel87-small + expansions: + AUTH: auth + ORCHESTRATION_FILE: versioned-api-testing.json + PYTHON_BINARY: /opt/python/3.13/bin/python3 + tags: [versionedApi_tag] + + # Storage engine tests + - name: storage-inmemory-rhel8-py3.9 + tasks: + - name: .standalone .noauth .nossl .4.0 .sync_async + - name: .standalone .noauth .nossl .4.4 .sync_async + - name: .standalone .noauth .nossl .5.0 .sync_async + - name: .standalone .noauth .nossl .6.0 .sync_async + - name: .standalone .noauth .nossl .7.0 .sync_async + - name: .standalone .noauth .nossl .8.0 .sync_async + - name: .standalone .noauth .nossl .rapid .sync_async + - name: .standalone .noauth .nossl .latest .sync_async + display_name: Storage InMemory RHEL8 py3.9 + run_on: + - rhel87-small + expansions: + STORAGE_ENGINE: inmemory + PYTHON_BINARY: /opt/python/3.9/bin/python3 + - name: storage-mmapv1-rhel8-py3.9 + tasks: + - name: .standalone .4.0 .noauth .nossl .sync_async + - name: .replica_set .4.0 .noauth .nossl .sync_async + display_name: Storage MMAPv1 RHEL8 py3.9 + run_on: + - rhel87-small + expansions: + STORAGE_ENGINE: mmapv1 + PYTHON_BINARY: /opt/python/3.9/bin/python3 diff --git a/.evergreen/scripts/generate_config.py b/.evergreen/scripts/generate_config.py index dfaad0f835..05529ecb25 100644 --- a/.evergreen/scripts/generate_config.py +++ b/.evergreen/scripts/generate_config.py @@ -93,7 +93,7 @@ def create_variant( else: host = host or "rhel8" run_on = [HOSTS[host].run_on] - name = display_name.replace(" ", "-").lower() + name = display_name.replace(" ", "-").replace("*-", "").lower() if python: expansions["PYTHON_BINARY"] = get_python_binary(python, host) if version: @@ -201,7 +201,7 @@ def create_ocsp_variants() -> list[BuildVariant]: variants = [] batchtime = BATCHTIME_WEEK * 2 expansions = dict(AUTH="noauth", SSL="ssl", TOPOLOGY="server") - base_display = "OCSP test" + base_display = "OCSP" # OCSP tests on rhel8 with all servers v4.4+ and all python versions. versions = [v for v in ALL_VERSIONS if v != "4.0"] @@ -241,10 +241,11 @@ def create_server_variants() -> list[BuildVariant]: # Run the full matrix on linux with min and max CPython, and latest pypy. host = "rhel8" + # Prefix the display name with an asterisk so it is sorted first. + base_display_name = "* Test" for python in [*MIN_MAX_PYTHON, PYPYS[-1]]: - display_name = f"Test {host}" expansions = dict(COVERAGE="coverage") - display_name = get_display_name("Test", host, python=python, **expansions) + display_name = get_display_name(base_display_name, host, python=python, **expansions) variant = create_variant( [f".{t} .sync_async" for t in TOPOLOGIES], display_name, @@ -258,7 +259,7 @@ def create_server_variants() -> list[BuildVariant]: # Test the rest of the pythons. 
for python in CPYTHONS[1:-1] + PYPYS[:-1]: display_name = f"Test {host}" - display_name = get_display_name("Test", host, python=python) + display_name = get_display_name(base_display_name, host, python=python) variant = create_variant( [f"{t} .sync_async" for t in SUB_TASKS], display_name, @@ -278,7 +279,7 @@ def create_server_variants() -> list[BuildVariant]: for version in get_versions_from("6.0"): tasks.extend(f"{t} .{version} !.sync_async" for t in SUB_TASKS) expansions = dict(SKIP_CSOT_TESTS="true") - display_name = get_display_name("Test", host, python=python, **expansions) + display_name = get_display_name(base_display_name, host, python=python, **expansions) variant = create_variant( tasks, display_name, @@ -385,7 +386,7 @@ def create_compression_variants(): for ind, (compressor, c_ext) in enumerate(product(["snappy", "zlib", "zstd"], C_EXTS)): expansions = dict(COMPRESSORS=compressor) handle_c_ext(c_ext, expansions) - base_name = f"{compressor} compression" + base_name = f"Compression {compressor}" python = CPYTHONS[ind % len(CPYTHONS)] display_name = get_display_name(base_name, host, python=python, **expansions) variant = create_variant( @@ -401,7 +402,7 @@ def create_compression_variants(): for compressor, python in zip_cycle(["snappy", "zlib", "zstd"], other_pythons): expansions = dict(COMPRESSORS=compressor) handle_c_ext(c_ext, expansions) - base_name = f"{compressor} compression" + base_name = f"Compression {compressor}" display_name = get_display_name(base_name, host, python=python, **expansions) variant = create_variant( task_names[compressor], @@ -427,7 +428,7 @@ def create_enterprise_auth_variants(): host = "win64" else: host = "rhel8" - display_name = get_display_name("Enterprise Auth", host, python=python, **expansions) + display_name = get_display_name("Auth Enterprise", host, python=python, **expansions) variant = create_variant( ["test-enterprise-auth"], display_name, host=host, python=python, expansions=expansions ) @@ -467,7 +468,7 @@ def create_pyopenssl_variants(): return variants -def create_storage_engine_tests(): +def create_storage_engine_variants(): host = "rhel8" engines = ["InMemory", "MMAPv1"] variants = [] @@ -490,7 +491,7 @@ def create_storage_engine_tests(): return variants -def create_versioned_api_tests(): +def create_stable_api_variants(): host = "rhel8" tags = ["versionedApi_tag"] tasks = [f".standalone .{v} .noauth .nossl .sync_async" for v in get_versions_from("5.0")] @@ -512,7 +513,7 @@ def create_versioned_api_tests(): # requireApiVersion, and don't automatically add apiVersion to # clients created in the test suite. 
expansions["ORCHESTRATION_FILE"] = "versioned-api-testing.json" - base_display_name = f"Versioned API {test_type}" + base_display_name = f"Stable API {test_type}" display_name = get_display_name(base_display_name, host, python=python, **expansions) variant = create_variant( tasks, display_name, host=host, python=python, tags=tags, expansions=expansions @@ -528,7 +529,7 @@ def create_green_framework_variants(): host = "rhel8" for python, framework in product([CPYTHONS[0], CPYTHONS[-2]], ["eventlet", "gevent"]): expansions = dict(GREEN_FRAMEWORK=framework, AUTH="auth", SSL="ssl") - display_name = get_display_name(f"{framework.capitalize()}", host, python=python) + display_name = get_display_name(f"Green {framework.capitalize()}", host, python=python) variant = create_variant( tasks, display_name, host=host, python=python, expansions=expansions ) @@ -623,7 +624,7 @@ def create_oidc_auth_variants(): variants.append( create_variant( tasks, - get_display_name("OIDC Auth", host), + get_display_name("Auth OIDC", host), host=host, batchtime=BATCHTIME_WEEK * 2, ) @@ -650,7 +651,7 @@ def create_mockupdb_variants(): return [ create_variant( ["mockupdb"], - get_display_name("MockupDB Tests", host, python=python), + get_display_name("MockupDB", host, python=python), python=python, host=host, ) @@ -704,7 +705,7 @@ def create_aws_auth_variants(): expansions["skip_web_identity_auth_test"] = "true" variant = create_variant( tasks, - get_display_name("AWS Auth", host, python=python), + get_display_name("Auth AWS", host, python=python), host=host, python=python, expansions=expansions, From 466d0a188f76d0cc6abe8352795d71d9af09a5d3 Mon Sep 17 00:00:00 2001 From: Casey Clements Date: Tue, 5 Nov 2024 10:47:36 -0500 Subject: [PATCH 094/182] PYTHON-4903 Adds typing overloading to bson.binary.Binary.from_vector (#1967) --- bson/binary.py | 28 ++++++++++++++++++++-------- test/test_bson.py | 6 ++++++ 2 files changed, 26 insertions(+), 8 deletions(-) diff --git a/bson/binary.py b/bson/binary.py index f03173a8ef..6dc5058c2c 100644 --- a/bson/binary.py +++ b/bson/binary.py @@ -16,7 +16,7 @@ import struct from dataclasses import dataclass from enum import Enum -from typing import TYPE_CHECKING, Any, Optional, Sequence, Tuple, Type, Union +from typing import TYPE_CHECKING, Any, Optional, Sequence, Tuple, Type, Union, overload from uuid import UUID """Tools for representing BSON binary data. @@ -195,7 +195,7 @@ class UuidRepresentation: VECTOR_SUBTYPE = 9 -"""**(BETA)** BSON binary subtype for densely packed vector data. +"""BSON binary subtype for densely packed vector data. .. versionadded:: 4.10 """ @@ -207,7 +207,7 @@ class UuidRepresentation: class BinaryVectorDtype(Enum): - """**(BETA)** Datatypes of vector subtype. + """Datatypes of vector subtype. :param FLOAT32: (0x27) Pack list of :class:`float` as float32 :param INT8: (0x03) Pack list of :class:`int` in [-128, 127] as signed int8 @@ -229,7 +229,7 @@ class BinaryVectorDtype(Enum): @dataclass class BinaryVector: - """**(BETA)** Vector of numbers along with metadata for binary interoperability. + """Vector of numbers along with metadata for binary interoperability. .. versionadded:: 4.10 """ @@ -256,7 +256,7 @@ class Binary(bytes): the difference between what should be considered binary data and what should be considered a string when we encode to BSON. - **(BETA)** Subtype 9 provides a space-efficient representation of 1-dimensional vector data. + Subtype 9 provides a space-efficient representation of 1-dimensional vector data. 
Its data is prepended with two bytes of metadata. The first (dtype) describes its data type, such as float32 or int8. The second (padding) prescribes the number of bits to ignore in the final byte. @@ -278,7 +278,7 @@ class Binary(bytes): Support any bytes-like type that implements the buffer protocol. .. versionchanged:: 4.10 - **(BETA)** Addition of vector subtype. + Addition of vector subtype. """ _type_marker = 5 @@ -397,6 +397,18 @@ def as_uuid(self, uuid_representation: int = UuidRepresentation.STANDARD) -> UUI f"cannot decode subtype {self.subtype} to {UUID_REPRESENTATION_NAMES[uuid_representation]}" ) + @classmethod + @overload + def from_vector(cls: Type[Binary], vector: BinaryVector) -> Binary: + ... + + @classmethod + @overload + def from_vector( + cls: Type[Binary], vector: list[int, float], dtype: BinaryVectorDtype, padding: int = 0 + ) -> Binary: + ... + @classmethod def from_vector( cls: Type[Binary], @@ -404,7 +416,7 @@ def from_vector( dtype: Optional[BinaryVectorDtype] = None, padding: Optional[int] = None, ) -> Binary: - """**(BETA)** Create a BSON :class:`~bson.binary.Binary` of Vector subtype. + """Create a BSON :class:`~bson.binary.Binary` of Vector subtype. To interpret the representation of the numbers, a data type must be included. See :class:`~bson.binary.BinaryVectorDtype` for available types and descriptions. @@ -447,7 +459,7 @@ def from_vector( return cls(metadata + data, subtype=VECTOR_SUBTYPE) def as_vector(self) -> BinaryVector: - """**(BETA)** From the Binary, create a list of numbers, along with dtype and padding. + """From the Binary, create a list of numbers, along with dtype and padding. :return: BinaryVector diff --git a/test/test_bson.py b/test/test_bson.py index 5dc1377bcd..b431f700dc 100644 --- a/test/test_bson.py +++ b/test/test_bson.py @@ -802,6 +802,12 @@ def test_vector(self): assert float_binary == Binary.from_vector( BinaryVector(list_vector, BinaryVectorDtype.FLOAT32) ) + # Confirm kwargs cannot be passed when BinaryVector is provided + with self.assertRaises(ValueError): + Binary.from_vector( + BinaryVector(list_vector, BinaryVectorDtype.PACKED_BIT, padding), + dtype=BinaryVectorDtype.PACKED_BIT, + ) # type: ignore[call-overload] def test_unicode_regex(self): """Tests we do not get a segfault for C extension on unicode RegExs. From 0733c4da44d4cd5c2a32c092762e639e14c3dc27 Mon Sep 17 00:00:00 2001 From: Shane Harvey Date: Tue, 5 Nov 2024 12:19:51 -0800 Subject: [PATCH 095/182] PYTHON-4925 Fix test bugs in $$matchAsDocument and $$matchAsRoot (#1988) Fixes a bug where the driverConnectionId field was missing from "server heartbeat failed" log messages. Avoids sending "upsert": False since various client.bulkWrite spec tests assume this field is only sent when it's True. 
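
A minimal usage sketch (not part of the patch; assumes a reachable test server on the default localhost port) illustrating the intended effect on the public bulk-write API: update and replace command documents now carry an "upsert" field only when the caller explicitly requests it, instead of always sending "upsert": false.

    from pymongo import MongoClient, ReplaceOne, UpdateOne

    client = MongoClient()  # assumed local test deployment
    coll = client.test.coll

    coll.bulk_write(
        [
            # No upsert argument given: the generated update op omits "upsert".
            UpdateOne({"_id": 1}, {"$set": {"x": 1}}),
            # Explicit upsert=True: "upsert": true is still sent as before.
            ReplaceOne({"_id": 2}, {"x": 2}, upsert=True),
        ]
    )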
--- pymongo/asynchronous/bulk.py | 18 +- pymongo/asynchronous/client_bulk.py | 16 +- pymongo/asynchronous/monitor.py | 5 + pymongo/operations.py | 4 +- pymongo/synchronous/bulk.py | 18 +- pymongo/synchronous/client_bulk.py | 16 +- pymongo/synchronous/monitor.py | 5 + test/asynchronous/unified_format.py | 4 +- .../unified/logging-replicaset.json | 4 + .../unified/logging-sharded.json | 2 + .../unified/logging-standalone.json | 2 + .../runOnRequirement-authMechanism-type.json | 4 +- .../valid-fail/operator-matchAsDocument.json | 205 ++++++++++++++++++ .../valid-fail/operator-matchAsRoot.json | 67 ++++++ ...es-lte-operator.json => operator-lte.json} | 2 +- .../valid-pass/operator-matchAsDocument.json | 124 +++++++++++ .../valid-pass/operator-matchAsRoot.json | 151 +++++++++++++ test/unified_format.py | 4 +- test/unified_format_shared.py | 21 +- test/utils.py | 2 + 20 files changed, 609 insertions(+), 65 deletions(-) create mode 100644 test/unified-test-format/valid-fail/operator-matchAsDocument.json create mode 100644 test/unified-test-format/valid-fail/operator-matchAsRoot.json rename test/unified-test-format/valid-pass/{matches-lte-operator.json => operator-lte.json} (97%) create mode 100644 test/unified-test-format/valid-pass/operator-matchAsDocument.json create mode 100644 test/unified-test-format/valid-pass/operator-matchAsRoot.json diff --git a/pymongo/asynchronous/bulk.py b/pymongo/asynchronous/bulk.py index e6cfe5b36e..6770d7b34e 100644 --- a/pymongo/asynchronous/bulk.py +++ b/pymongo/asynchronous/bulk.py @@ -140,8 +140,8 @@ def add_update( self, selector: Mapping[str, Any], update: Union[Mapping[str, Any], _Pipeline], - multi: bool = False, - upsert: bool = False, + multi: bool, + upsert: Optional[bool], collation: Optional[Mapping[str, Any]] = None, array_filters: Optional[list[Mapping[str, Any]]] = None, hint: Union[str, dict[str, Any], None] = None, @@ -149,9 +149,9 @@ def add_update( ) -> None: """Create an update document and add it to the list of ops.""" validate_ok_for_update(update) - cmd: dict[str, Any] = dict( # noqa: C406 - [("q", selector), ("u", update), ("multi", multi), ("upsert", upsert)] - ) + cmd: dict[str, Any] = {"q": selector, "u": update, "multi": multi} + if upsert is not None: + cmd["upsert"] = upsert if collation is not None: self.uses_collation = True cmd["collation"] = collation @@ -173,14 +173,16 @@ def add_replace( self, selector: Mapping[str, Any], replacement: Mapping[str, Any], - upsert: bool = False, + upsert: Optional[bool], collation: Optional[Mapping[str, Any]] = None, hint: Union[str, dict[str, Any], None] = None, sort: Optional[Mapping[str, Any]] = None, ) -> None: """Create a replace document and add it to the list of ops.""" validate_ok_for_replace(replacement) - cmd = {"q": selector, "u": replacement, "multi": False, "upsert": upsert} + cmd: dict[str, Any] = {"q": selector, "u": replacement} + if upsert is not None: + cmd["upsert"] = upsert if collation is not None: self.uses_collation = True cmd["collation"] = collation @@ -200,7 +202,7 @@ def add_delete( hint: Union[str, dict[str, Any], None] = None, ) -> None: """Create a delete document and add it to the list of ops.""" - cmd = {"q": selector, "limit": limit} + cmd: dict[str, Any] = {"q": selector, "limit": limit} if collation is not None: self.uses_collation = True cmd["collation"] = collation diff --git a/pymongo/asynchronous/client_bulk.py b/pymongo/asynchronous/client_bulk.py index a6f7178e47..0dcdaa6c07 100644 --- a/pymongo/asynchronous/client_bulk.py +++ 
b/pymongo/asynchronous/client_bulk.py @@ -106,20 +106,13 @@ def __init__( self.bypass_doc_val = bypass_document_validation self.comment = comment self.verbose_results = verbose_results - self.ops: list[tuple[str, Mapping[str, Any]]] = [] self.namespaces: list[str] = [] self.idx_offset: int = 0 self.total_ops: int = 0 - self.executed = False - self.uses_upsert = False self.uses_collation = False self.uses_array_filters = False - self.uses_hint_update = False - self.uses_hint_delete = False - self.uses_sort = False - self.is_retryable = self.client.options.retry_writes self.retrying = False self.started_retryable_write = False @@ -144,7 +137,7 @@ def add_update( namespace: str, selector: Mapping[str, Any], update: Union[Mapping[str, Any], _Pipeline], - multi: bool = False, + multi: bool, upsert: Optional[bool] = None, collation: Optional[Mapping[str, Any]] = None, array_filters: Optional[list[Mapping[str, Any]]] = None, @@ -160,19 +153,16 @@ def add_update( "multi": multi, } if upsert is not None: - self.uses_upsert = True cmd["upsert"] = upsert if array_filters is not None: self.uses_array_filters = True cmd["arrayFilters"] = array_filters if hint is not None: - self.uses_hint_update = True cmd["hint"] = hint if collation is not None: self.uses_collation = True cmd["collation"] = collation if sort is not None: - self.uses_sort = True cmd["sort"] = sort if multi: # A bulk_write containing an update_many is not retryable. @@ -200,16 +190,13 @@ def add_replace( "multi": False, } if upsert is not None: - self.uses_upsert = True cmd["upsert"] = upsert if hint is not None: - self.uses_hint_update = True cmd["hint"] = hint if collation is not None: self.uses_collation = True cmd["collation"] = collation if sort is not None: - self.uses_sort = True cmd["sort"] = sort self.ops.append(("replace", cmd)) self.namespaces.append(namespace) @@ -226,7 +213,6 @@ def add_delete( """Create a delete document and add it to the list of ops.""" cmd = {"delete": -1, "filter": selector, "multi": multi} if hint is not None: - self.uses_hint_delete = True cmd["hint"] = hint if collation is not None: self.uses_collation = True diff --git a/pymongo/asynchronous/monitor.py b/pymongo/asynchronous/monitor.py index f9e912b084..a4dc9b7f45 100644 --- a/pymongo/asynchronous/monitor.py +++ b/pymongo/asynchronous/monitor.py @@ -149,6 +149,7 @@ def __init__( self._listeners = self._settings._pool_options._event_listeners self._publish = self._listeners is not None and self._listeners.enabled_for_server_heartbeat self._cancel_context: Optional[_CancellationContext] = None + self._conn_id: Optional[int] = None self._rtt_monitor = _RttMonitor( topology, topology_settings, @@ -243,6 +244,7 @@ async def _check_server(self) -> ServerDescription: Returns a ServerDescription. """ + self._conn_id = None start = time.monotonic() try: try: @@ -272,6 +274,7 @@ async def _check_server(self) -> ServerDescription: awaited=awaited, durationMS=duration * 1000, failure=error, + driverConnectionId=self._conn_id, message=_SDAMStatusMessage.HEARTBEAT_FAIL, ) await self._reset_connection() @@ -314,6 +317,8 @@ async def _check_once(self) -> ServerDescription: ) self._cancel_context = conn.cancel_context + # Record the connection id so we can later attach it to the failed log message. 
+ self._conn_id = conn.id response, round_trip_time = await self._check_with_socket(conn) if not response.awaitable: self._rtt_monitor.add_sample(round_trip_time) diff --git a/pymongo/operations.py b/pymongo/operations.py index 8905048c4e..482ab68003 100644 --- a/pymongo/operations.py +++ b/pymongo/operations.py @@ -332,7 +332,7 @@ def __init__( self, filter: Mapping[str, Any], replacement: Union[_DocumentType, RawBSONDocument], - upsert: bool = False, + upsert: Optional[bool] = None, collation: Optional[_CollationIn] = None, hint: Optional[_IndexKeyHint] = None, namespace: Optional[str] = None, @@ -693,7 +693,7 @@ def _add_to_bulk(self, bulkobj: _AgnosticBulk) -> None: self._filter, self._doc, True, - bool(self._upsert), + self._upsert, collation=validate_collation_or_none(self._collation), array_filters=self._array_filters, hint=self._hint, diff --git a/pymongo/synchronous/bulk.py b/pymongo/synchronous/bulk.py index 7fb29a977f..0b709f1acf 100644 --- a/pymongo/synchronous/bulk.py +++ b/pymongo/synchronous/bulk.py @@ -140,8 +140,8 @@ def add_update( self, selector: Mapping[str, Any], update: Union[Mapping[str, Any], _Pipeline], - multi: bool = False, - upsert: bool = False, + multi: bool, + upsert: Optional[bool], collation: Optional[Mapping[str, Any]] = None, array_filters: Optional[list[Mapping[str, Any]]] = None, hint: Union[str, dict[str, Any], None] = None, @@ -149,9 +149,9 @@ def add_update( ) -> None: """Create an update document and add it to the list of ops.""" validate_ok_for_update(update) - cmd: dict[str, Any] = dict( # noqa: C406 - [("q", selector), ("u", update), ("multi", multi), ("upsert", upsert)] - ) + cmd: dict[str, Any] = {"q": selector, "u": update, "multi": multi} + if upsert is not None: + cmd["upsert"] = upsert if collation is not None: self.uses_collation = True cmd["collation"] = collation @@ -173,14 +173,16 @@ def add_replace( self, selector: Mapping[str, Any], replacement: Mapping[str, Any], - upsert: bool = False, + upsert: Optional[bool], collation: Optional[Mapping[str, Any]] = None, hint: Union[str, dict[str, Any], None] = None, sort: Optional[Mapping[str, Any]] = None, ) -> None: """Create a replace document and add it to the list of ops.""" validate_ok_for_replace(replacement) - cmd = {"q": selector, "u": replacement, "multi": False, "upsert": upsert} + cmd: dict[str, Any] = {"q": selector, "u": replacement} + if upsert is not None: + cmd["upsert"] = upsert if collation is not None: self.uses_collation = True cmd["collation"] = collation @@ -200,7 +202,7 @@ def add_delete( hint: Union[str, dict[str, Any], None] = None, ) -> None: """Create a delete document and add it to the list of ops.""" - cmd = {"q": selector, "limit": limit} + cmd: dict[str, Any] = {"q": selector, "limit": limit} if collation is not None: self.uses_collation = True cmd["collation"] = collation diff --git a/pymongo/synchronous/client_bulk.py b/pymongo/synchronous/client_bulk.py index 6cb4275417..625e8429eb 100644 --- a/pymongo/synchronous/client_bulk.py +++ b/pymongo/synchronous/client_bulk.py @@ -106,20 +106,13 @@ def __init__( self.bypass_doc_val = bypass_document_validation self.comment = comment self.verbose_results = verbose_results - self.ops: list[tuple[str, Mapping[str, Any]]] = [] self.namespaces: list[str] = [] self.idx_offset: int = 0 self.total_ops: int = 0 - self.executed = False - self.uses_upsert = False self.uses_collation = False self.uses_array_filters = False - self.uses_hint_update = False - self.uses_hint_delete = False - self.uses_sort = False - self.is_retryable 
= self.client.options.retry_writes self.retrying = False self.started_retryable_write = False @@ -144,7 +137,7 @@ def add_update( namespace: str, selector: Mapping[str, Any], update: Union[Mapping[str, Any], _Pipeline], - multi: bool = False, + multi: bool, upsert: Optional[bool] = None, collation: Optional[Mapping[str, Any]] = None, array_filters: Optional[list[Mapping[str, Any]]] = None, @@ -160,19 +153,16 @@ def add_update( "multi": multi, } if upsert is not None: - self.uses_upsert = True cmd["upsert"] = upsert if array_filters is not None: self.uses_array_filters = True cmd["arrayFilters"] = array_filters if hint is not None: - self.uses_hint_update = True cmd["hint"] = hint if collation is not None: self.uses_collation = True cmd["collation"] = collation if sort is not None: - self.uses_sort = True cmd["sort"] = sort if multi: # A bulk_write containing an update_many is not retryable. @@ -200,16 +190,13 @@ def add_replace( "multi": False, } if upsert is not None: - self.uses_upsert = True cmd["upsert"] = upsert if hint is not None: - self.uses_hint_update = True cmd["hint"] = hint if collation is not None: self.uses_collation = True cmd["collation"] = collation if sort is not None: - self.uses_sort = True cmd["sort"] = sort self.ops.append(("replace", cmd)) self.namespaces.append(namespace) @@ -226,7 +213,6 @@ def add_delete( """Create a delete document and add it to the list of ops.""" cmd = {"delete": -1, "filter": selector, "multi": multi} if hint is not None: - self.uses_hint_delete = True cmd["hint"] = hint if collation is not None: self.uses_collation = True diff --git a/pymongo/synchronous/monitor.py b/pymongo/synchronous/monitor.py index 3f9bb2ea75..d02ad0a6fd 100644 --- a/pymongo/synchronous/monitor.py +++ b/pymongo/synchronous/monitor.py @@ -149,6 +149,7 @@ def __init__( self._listeners = self._settings._pool_options._event_listeners self._publish = self._listeners is not None and self._listeners.enabled_for_server_heartbeat self._cancel_context: Optional[_CancellationContext] = None + self._conn_id: Optional[int] = None self._rtt_monitor = _RttMonitor( topology, topology_settings, @@ -243,6 +244,7 @@ def _check_server(self) -> ServerDescription: Returns a ServerDescription. """ + self._conn_id = None start = time.monotonic() try: try: @@ -272,6 +274,7 @@ def _check_server(self) -> ServerDescription: awaited=awaited, durationMS=duration * 1000, failure=error, + driverConnectionId=self._conn_id, message=_SDAMStatusMessage.HEARTBEAT_FAIL, ) self._reset_connection() @@ -314,6 +317,8 @@ def _check_once(self) -> ServerDescription: ) self._cancel_context = conn.cancel_context + # Record the connection id so we can later attach it to the failed log message. 
+ self._conn_id = conn.id response, round_trip_time = self._check_with_socket(conn) if not response.awaitable: self._rtt_monitor.add_sample(round_trip_time) diff --git a/test/asynchronous/unified_format.py b/test/asynchronous/unified_format.py index f8179dc0c7..81feed4d4c 100644 --- a/test/asynchronous/unified_format.py +++ b/test/asynchronous/unified_format.py @@ -1328,8 +1328,8 @@ def format_logs(log_list): if log.module == "ocsp_support": continue data = json_util.loads(log.getMessage()) - client = data.pop("clientId") if "clientId" in data else data.pop("topologyId") - client_to_log[client].append( + client_id = data.get("clientId", data.get("topologyId")) + client_to_log[client_id].append( { "level": log.levelname.lower(), "component": log.name.replace("pymongo.", "", 1), diff --git a/test/discovery_and_monitoring/unified/logging-replicaset.json b/test/discovery_and_monitoring/unified/logging-replicaset.json index e6738225cd..fe6ac60b68 100644 --- a/test/discovery_and_monitoring/unified/logging-replicaset.json +++ b/test/discovery_and_monitoring/unified/logging-replicaset.json @@ -357,6 +357,7 @@ }, "durationMS": { "$$type": [ + "double", "int", "long" ] @@ -398,6 +399,7 @@ }, "durationMS": { "$$type": [ + "double", "int", "long" ] @@ -439,6 +441,7 @@ }, "durationMS": { "$$type": [ + "double", "int", "long" ] @@ -589,6 +592,7 @@ }, "durationMS": { "$$type": [ + "double", "int", "long" ] diff --git a/test/discovery_and_monitoring/unified/logging-sharded.json b/test/discovery_and_monitoring/unified/logging-sharded.json index 61b27f5be0..3788708ab0 100644 --- a/test/discovery_and_monitoring/unified/logging-sharded.json +++ b/test/discovery_and_monitoring/unified/logging-sharded.json @@ -324,6 +324,7 @@ }, "durationMS": { "$$type": [ + "double", "int", "long" ] @@ -475,6 +476,7 @@ }, "durationMS": { "$$type": [ + "double", "int", "long" ] diff --git a/test/discovery_and_monitoring/unified/logging-standalone.json b/test/discovery_and_monitoring/unified/logging-standalone.json index 1ee6dbe899..0682a1a4fb 100644 --- a/test/discovery_and_monitoring/unified/logging-standalone.json +++ b/test/discovery_and_monitoring/unified/logging-standalone.json @@ -339,6 +339,7 @@ }, "durationMS": { "$$type": [ + "double", "int", "long" ] @@ -500,6 +501,7 @@ }, "durationMS": { "$$type": [ + "double", "int", "long" ] diff --git a/test/unified-test-format/invalid/runOnRequirement-authMechanism-type.json b/test/unified-test-format/invalid/runOnRequirement-authMechanism-type.json index b97654a743..007f3f304c 100644 --- a/test/unified-test-format/invalid/runOnRequirement-authMechanism-type.json +++ b/test/unified-test-format/invalid/runOnRequirement-authMechanism-type.json @@ -9,9 +9,7 @@ "tests": [ { "description": "foo", - "operations": [ - - ] + "operations": [] } ] } diff --git a/test/unified-test-format/valid-fail/operator-matchAsDocument.json b/test/unified-test-format/valid-fail/operator-matchAsDocument.json new file mode 100644 index 0000000000..24f6be9cb8 --- /dev/null +++ b/test/unified-test-format/valid-fail/operator-matchAsDocument.json @@ -0,0 +1,205 @@ +{ + "description": "operator-matchAsDocument", + "schemaVersion": "1.13", + "createEntities": [ + { + "client": { + "id": "client0" + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "test" + } + }, + { + "collection": { + "id": "collection0", + "database": "database0", + "collectionName": "coll0" + } + } + ], + "initialData": [ + { + "collectionName": "coll0", + "databaseName": "test", + "documents": [ + { + 
"_id": 1, + "json": "{ \"x\": 1, \"y\": 2 }" + }, + { + "_id": 2, + "json": "1" + }, + { + "_id": 3, + "json": "[ \"foo\" ]" + }, + { + "_id": 4, + "json": "{ \"x\" }" + } + ] + } + ], + "tests": [ + { + "description": "matchAsDocument with non-matching filter", + "operations": [ + { + "name": "find", + "object": "collection0", + "arguments": { + "filter": { + "_id": 1 + }, + "limit": 1 + }, + "expectResult": [ + { + "_id": 1, + "json": { + "$$matchAsDocument": { + "x": 1, + "y": "two" + } + } + } + ] + } + ] + }, + { + "description": "matchAsDocument evaluates special operators", + "operations": [ + { + "name": "find", + "object": "collection0", + "arguments": { + "filter": { + "_id": 1 + }, + "limit": 1 + }, + "expectResult": [ + { + "_id": 1, + "json": { + "$$matchAsDocument": { + "x": 1, + "y": { + "$$exists": false + } + } + } + } + ] + } + ] + }, + { + "description": "matchAsDocument does not permit extra fields", + "operations": [ + { + "name": "find", + "object": "collection0", + "arguments": { + "filter": { + "_id": 1 + }, + "limit": 1 + }, + "expectResult": [ + { + "_id": 1, + "json": { + "$$matchAsDocument": { + "x": 1 + } + } + } + ] + } + ] + }, + { + "description": "matchAsDocument expects JSON object but given scalar", + "operations": [ + { + "name": "find", + "object": "collection0", + "arguments": { + "filter": { + "_id": 2 + }, + "limit": 1 + }, + "expectResult": [ + { + "_id": 2, + "json": { + "$$matchAsDocument": { + "$$matchAsRoot": {} + } + } + } + ] + } + ] + }, + { + "description": "matchAsDocument expects JSON object but given array", + "operations": [ + { + "name": "find", + "object": "collection0", + "arguments": { + "filter": { + "_id": 3 + }, + "limit": 1 + }, + "expectResult": [ + { + "_id": 3, + "json": { + "$$matchAsDocument": { + "$$matchAsRoot": {} + } + } + } + ] + } + ] + }, + { + "description": "matchAsDocument fails to decode Extended JSON", + "operations": [ + { + "name": "find", + "object": "collection0", + "arguments": { + "filter": { + "_id": 4 + }, + "limit": 1 + }, + "expectResult": [ + { + "_id": 4, + "json": { + "$$matchAsDocument": { + "$$matchAsRoot": {} + } + } + } + ] + } + ] + } + ] +} diff --git a/test/unified-test-format/valid-fail/operator-matchAsRoot.json b/test/unified-test-format/valid-fail/operator-matchAsRoot.json new file mode 100644 index 0000000000..ec6309418c --- /dev/null +++ b/test/unified-test-format/valid-fail/operator-matchAsRoot.json @@ -0,0 +1,67 @@ +{ + "description": "operator-matchAsRoot", + "schemaVersion": "1.13", + "createEntities": [ + { + "client": { + "id": "client0" + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "test" + } + }, + { + "collection": { + "id": "collection0", + "database": "database0", + "collectionName": "coll0" + } + } + ], + "initialData": [ + { + "collectionName": "coll0", + "databaseName": "test", + "documents": [ + { + "_id": 1, + "x": { + "y": 2, + "z": 3 + } + } + ] + } + ], + "tests": [ + { + "description": "matchAsRoot with nested document does not match", + "operations": [ + { + "name": "find", + "object": "collection0", + "arguments": { + "filter": { + "_id": 1 + }, + "limit": 1 + }, + "expectResult": [ + { + "_id": 1, + "x": { + "$$matchAsRoot": { + "y": 3 + } + } + } + ] + } + ] + } + ] +} diff --git a/test/unified-test-format/valid-pass/matches-lte-operator.json b/test/unified-test-format/valid-pass/operator-lte.json similarity index 97% rename from test/unified-test-format/valid-pass/matches-lte-operator.json rename to 
test/unified-test-format/valid-pass/operator-lte.json index 4de65c5838..4a13b16d15 100644 --- a/test/unified-test-format/valid-pass/matches-lte-operator.json +++ b/test/unified-test-format/valid-pass/operator-lte.json @@ -1,5 +1,5 @@ { - "description": "matches-lte-operator", + "description": "operator-lte", "schemaVersion": "1.9", "createEntities": [ { diff --git a/test/unified-test-format/valid-pass/operator-matchAsDocument.json b/test/unified-test-format/valid-pass/operator-matchAsDocument.json new file mode 100644 index 0000000000..fd8b514d4a --- /dev/null +++ b/test/unified-test-format/valid-pass/operator-matchAsDocument.json @@ -0,0 +1,124 @@ +{ + "description": "operator-matchAsDocument", + "schemaVersion": "1.13", + "createEntities": [ + { + "client": { + "id": "client0" + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "test" + } + }, + { + "collection": { + "id": "collection0", + "database": "database0", + "collectionName": "coll0" + } + } + ], + "initialData": [ + { + "collectionName": "coll0", + "databaseName": "test", + "documents": [ + { + "_id": 1, + "json": "{ \"x\": 1, \"y\": 2.0 }" + }, + { + "_id": 2, + "json": "{ \"x\": { \"$oid\": \"57e193d7a9cc81b4027498b5\" } }" + } + ] + } + ], + "tests": [ + { + "description": "matchAsDocument performs flexible numeric comparisons", + "operations": [ + { + "name": "find", + "object": "collection0", + "arguments": { + "filter": { + "_id": 1 + }, + "limit": 1 + }, + "expectResult": [ + { + "_id": 1, + "json": { + "$$matchAsDocument": { + "x": 1, + "y": 2 + } + } + } + ] + } + ] + }, + { + "description": "matchAsDocument evaluates special operators", + "operations": [ + { + "name": "find", + "object": "collection0", + "arguments": { + "filter": { + "_id": 1 + }, + "limit": 1 + }, + "expectResult": [ + { + "_id": 1, + "json": { + "$$matchAsDocument": { + "x": 1, + "y": { + "$$exists": true + } + } + } + } + ] + } + ] + }, + { + "description": "matchAsDocument decodes Extended JSON", + "operations": [ + { + "name": "find", + "object": "collection0", + "arguments": { + "filter": { + "_id": 2 + }, + "limit": 1 + }, + "expectResult": [ + { + "_id": 2, + "json": { + "$$matchAsDocument": { + "x": { + "$$type": "objectId" + } + } + } + } + ] + } + ] + } + ] +} diff --git a/test/unified-test-format/valid-pass/operator-matchAsRoot.json b/test/unified-test-format/valid-pass/operator-matchAsRoot.json new file mode 100644 index 0000000000..1966e3b377 --- /dev/null +++ b/test/unified-test-format/valid-pass/operator-matchAsRoot.json @@ -0,0 +1,151 @@ +{ + "description": "operator-matchAsRoot", + "schemaVersion": "1.13", + "createEntities": [ + { + "client": { + "id": "client0" + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "test" + } + }, + { + "collection": { + "id": "collection0", + "database": "database0", + "collectionName": "coll0" + } + } + ], + "initialData": [ + { + "collectionName": "coll0", + "databaseName": "test", + "documents": [ + { + "_id": 1, + "x": { + "y": 2, + "z": 3 + } + }, + { + "_id": 2, + "json": "{ \"x\": 1, \"y\": 2 }" + } + ] + } + ], + "tests": [ + { + "description": "matchAsRoot with nested document", + "operations": [ + { + "name": "find", + "object": "collection0", + "arguments": { + "filter": { + "_id": 1 + }, + "limit": 1 + }, + "expectResult": [ + { + "_id": 1, + "x": { + "$$matchAsRoot": { + "y": 2 + } + } + } + ] + } + ] + }, + { + "description": "matchAsRoot performs flexible numeric comparisons", + "operations": [ + { + "name": 
"find", + "object": "collection0", + "arguments": { + "filter": { + "_id": 1 + }, + "limit": 1 + }, + "expectResult": [ + { + "_id": 1, + "x": { + "$$matchAsRoot": { + "y": 2 + } + } + } + ] + } + ] + }, + { + "description": "matchAsRoot evaluates special operators", + "operations": [ + { + "name": "find", + "object": "collection0", + "arguments": { + "filter": { + "_id": 1 + }, + "limit": 1 + }, + "expectResult": [ + { + "_id": 1, + "x": { + "$$matchAsRoot": { + "y": 2, + "z": { + "$$exists": true + } + } + } + } + ] + } + ] + }, + { + "description": "matchAsRoot with matchAsDocument", + "operations": [ + { + "name": "find", + "object": "collection0", + "arguments": { + "filter": { + "_id": 2 + }, + "limit": 1 + }, + "expectResult": [ + { + "_id": 2, + "json": { + "$$matchAsDocument": { + "$$matchAsRoot": { + "x": 1 + } + } + } + } + ] + } + ] + } + ] +} diff --git a/test/unified_format.py b/test/unified_format.py index 80c37470e3..395d40b2d1 100644 --- a/test/unified_format.py +++ b/test/unified_format.py @@ -1314,8 +1314,8 @@ def format_logs(log_list): if log.module == "ocsp_support": continue data = json_util.loads(log.getMessage()) - client = data.pop("clientId") if "clientId" in data else data.pop("topologyId") - client_to_log[client].append( + client_id = data.get("clientId", data.get("topologyId")) + client_to_log[client_id].append( { "level": log.levelname.lower(), "component": log.name.replace("pymongo.", "", 1), diff --git a/test/unified_format_shared.py b/test/unified_format_shared.py index 1c87fb3f18..0c685366f4 100644 --- a/test/unified_format_shared.py +++ b/test/unified_format_shared.py @@ -433,10 +433,12 @@ def _operation_lte(self, spec, actual, key_to_compare): self.test.assertLessEqual(actual[key_to_compare], spec) def _operation_matchAsDocument(self, spec, actual, key_to_compare): - self._match_document(spec, json_util.loads(actual[key_to_compare]), False) + self._match_document(spec, json_util.loads(actual[key_to_compare]), False, test=True) def _operation_matchAsRoot(self, spec, actual, key_to_compare): - self._match_document(spec, actual, True) + if key_to_compare: + actual = actual[key_to_compare] + self._match_document(spec, actual, True, test=True) def _evaluate_special_operation(self, opname, spec, actual, key_to_compare): method_name = "_operation_{}".format(opname.strip("$")) @@ -489,7 +491,7 @@ def _evaluate_if_special_operation(self, expectation, actual, key_to_compare=Non def _match_document(self, expectation, actual, is_root, test=False): if self._evaluate_if_special_operation(expectation, actual): - return + return True self.test.assertIsInstance(actual, abc.Mapping) for key, value in expectation.items(): @@ -521,25 +523,26 @@ def match_result(self, expectation, actual, in_recursive_call=False, test=True): self.test.assertIsInstance(actual, abc.MutableSequence) for e, a in zip(expectation, actual): if isinstance(e, abc.Mapping): - self._match_document(e, a, is_root=not in_recursive_call, test=test) + res = self._match_document(e, a, is_root=not in_recursive_call, test=test) else: - self.match_result(e, a, in_recursive_call=True, test=test) - return None + res = self.match_result(e, a, in_recursive_call=True, test=test) + if not res: + return False + return True # account for flexible numerics in element-wise comparison - if isinstance(expectation, int) or isinstance(expectation, float): + if isinstance(expectation, (int, float)): if test: self.test.assertEqual(expectation, actual) else: return expectation == actual - return None else: if test: 
self.test.assertIsInstance(actual, type(expectation)) self.test.assertEqual(expectation, actual) else: return isinstance(actual, type(expectation)) and expectation == actual - return None + return True def match_server_description(self, actual: ServerDescription, spec: dict) -> None: for field, expected in spec.items(): diff --git a/test/utils.py b/test/utils.py index 3eac4fa509..766f209de2 100644 --- a/test/utils.py +++ b/test/utils.py @@ -20,6 +20,7 @@ import copy import functools import os +import random import re import shutil import sys @@ -309,6 +310,7 @@ class MockConnection: def __init__(self): self.cancel_context = _CancellationContext() self.more_to_come = False + self.id = random.randint(0, 100) def close_conn(self, reason): pass From 41527f06bb166f81e0e10608a2b5c4a1d6446a46 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 11 Nov 2024 06:38:46 -0600 Subject: [PATCH 096/182] Bump pyright from 1.1.385 to 1.1.388 (#1999) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/typing.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/typing.txt b/requirements/typing.txt index b1f07604dc..ad799ea368 100644 --- a/requirements/typing.txt +++ b/requirements/typing.txt @@ -1,5 +1,5 @@ mypy==1.13.0 -pyright==1.1.385 +pyright==1.1.388 typing_extensions -r ./encryption.txt -r ./ocsp.txt From 6a8a8052171f2a5cefb22c3dbdc116d564cdf5ba Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Mon, 11 Nov 2024 09:33:29 -0600 Subject: [PATCH 097/182] PYTHON-4845 Ensure ALLOWED_HOSTS is optional for Workload Usage (#1998) --- pymongo/asynchronous/auth_oidc.py | 2 +- pymongo/auth_shared.py | 8 ++++--- pymongo/common.py | 6 +++-- pymongo/synchronous/auth_oidc.py | 2 +- test/auth_oidc/test_auth_oidc.py | 38 +++++++++++++++++++++++++++---- 5 files changed, 44 insertions(+), 12 deletions(-) diff --git a/pymongo/asynchronous/auth_oidc.py b/pymongo/asynchronous/auth_oidc.py index f5801b85d4..f1c15045de 100644 --- a/pymongo/asynchronous/auth_oidc.py +++ b/pymongo/asynchronous/auth_oidc.py @@ -55,7 +55,7 @@ def _get_authenticator( properties = credentials.mechanism_properties # Validate that the address is allowed. 
- if not properties.environment: + if properties.human_callback is not None: found = False allowed_hosts = properties.allowed_hosts for patt in allowed_hosts: diff --git a/pymongo/auth_shared.py b/pymongo/auth_shared.py index 1e1ce7b4d8..9534bd74ad 100644 --- a/pymongo/auth_shared.py +++ b/pymongo/auth_shared.py @@ -100,8 +100,8 @@ def _validate_canonicalize_host_name(value: str | bool) -> str | bool: def _build_credentials_tuple( mech: str, source: Optional[str], - user: str, - passwd: str, + user: Optional[str], + passwd: Optional[str], extra: Mapping[str, Any], database: Optional[str], ) -> MongoCredential: @@ -161,6 +161,8 @@ def _build_credentials_tuple( "::1", ] allowed_hosts = properties.get("ALLOWED_HOSTS", default_allowed) + if properties.get("ALLOWED_HOSTS", None) is not None and human_callback is None: + raise ConfigurationError("ALLOWED_HOSTS is only valid with OIDC_HUMAN_CALLBACK") msg = ( "authentication with MONGODB-OIDC requires providing either a callback or a environment" ) @@ -207,7 +209,7 @@ def _build_credentials_tuple( environment=environ, allowed_hosts=allowed_hosts, token_resource=token_resource, - username=user, + username=user or "", ) return MongoCredential(mech, "$external", user, passwd, oidc_props, _Cache()) diff --git a/pymongo/common.py b/pymongo/common.py index d4601a0eb5..5661de011c 100644 --- a/pymongo/common.py +++ b/pymongo/common.py @@ -873,8 +873,10 @@ def get_setter_key(x: str) -> str: validator = _get_validator(opt, URI_OPTIONS_VALIDATOR_MAP, normed_key=normed_key) validated = validator(opt, value) except (ValueError, TypeError, ConfigurationError) as exc: - if normed_key == "authmechanismproperties" and any( - p in str(exc) for p in _MECH_PROP_MUST_RAISE + if ( + normed_key == "authmechanismproperties" + and any(p in str(exc) for p in _MECH_PROP_MUST_RAISE) + and "is not a supported auth mechanism property" not in str(exc) ): raise if warn: diff --git a/pymongo/synchronous/auth_oidc.py b/pymongo/synchronous/auth_oidc.py index 6381a408ab..5a8967d96b 100644 --- a/pymongo/synchronous/auth_oidc.py +++ b/pymongo/synchronous/auth_oidc.py @@ -55,7 +55,7 @@ def _get_authenticator( properties = credentials.mechanism_properties # Validate that the address is allowed. 
- if not properties.environment: + if properties.human_callback is not None: found = False allowed_hosts = properties.allowed_hosts for patt in allowed_hosts: diff --git a/test/auth_oidc/test_auth_oidc.py b/test/auth_oidc/test_auth_oidc.py index a0127304c1..7a78f3d2f6 100644 --- a/test/auth_oidc/test_auth_oidc.py +++ b/test/auth_oidc/test_auth_oidc.py @@ -38,11 +38,17 @@ from pymongo._azure_helpers import _get_azure_response from pymongo._gcp_helpers import _get_gcp_response from pymongo.auth_oidc_shared import _get_k8s_token +from pymongo.auth_shared import _build_credentials_tuple from pymongo.cursor_shared import CursorType from pymongo.errors import AutoReconnect, ConfigurationError, OperationFailure from pymongo.hello import HelloCompat from pymongo.operations import InsertOne -from pymongo.synchronous.auth_oidc import OIDCCallback, OIDCCallbackContext, OIDCCallbackResult +from pymongo.synchronous.auth_oidc import ( + OIDCCallback, + OIDCCallbackContext, + OIDCCallbackResult, + _get_authenticator, +) from pymongo.uri_parser import parse_uri ROOT = Path(__file__).parent.parent.resolve() @@ -103,7 +109,6 @@ def fail_point(self, command_args): client.close() -@pytest.mark.auth_oidc class TestAuthOIDCHuman(OIDCTestBase): uri: str @@ -838,12 +843,35 @@ def test_2_4_invalid_client_configuration_with_callback(self): self.create_client(authmechanismproperties=props) def test_2_5_invalid_use_of_ALLOWED_HOSTS(self): - # Create an OIDC configured client with auth mechanism properties `{"ENVIRONMENT": "azure", "ALLOWED_HOSTS": []}`. - props: Dict = {"ENVIRONMENT": "azure", "ALLOWED_HOSTS": []} + # Create an OIDC configured client with auth mechanism properties `{"ENVIRONMENT": "test", "ALLOWED_HOSTS": []}`. + props: Dict = {"ENVIRONMENT": "test", "ALLOWED_HOSTS": []} # Assert it returns a client configuration error. with self.assertRaises(ConfigurationError): self.create_client(authmechanismproperties=props) + # Create an OIDC configured client with auth mechanism properties `{"OIDC_CALLBACK": "", "ALLOWED_HOSTS": []}`. + props: Dict = {"OIDC_CALLBACK": self.create_request_cb(), "ALLOWED_HOSTS": []} + # Assert it returns a client configuration error. + with self.assertRaises(ConfigurationError): + self.create_client(authmechanismproperties=props) + + def test_2_6_ALLOWED_HOSTS_defaults_ignored(self): + # Create a MongoCredential for OIDC with a machine callback. + props = {"OIDC_CALLBACK": self.create_request_cb()} + extra = dict(authmechanismproperties=props) + mongo_creds = _build_credentials_tuple("MONGODB-OIDC", None, "foo", None, extra, "test") + # Assert that creating an authenticator for example.com does not result in an error. + authenticator = _get_authenticator(mongo_creds, ("example.com", 30)) + assert authenticator.properties.username == "foo" + + # Create a MongoCredential for OIDC with an ENVIRONMENT. + props = {"ENVIRONMENT": "test"} + extra = dict(authmechanismproperties=props) + mongo_creds = _build_credentials_tuple("MONGODB-OIDC", None, None, None, extra, "test") + # Assert that creating an authenticator for example.com does not result in an error. + authenticator = _get_authenticator(mongo_creds, ("example.com", 30)) + assert authenticator.properties.username == "" + def test_3_1_authentication_failure_with_cached_tokens_fetch_a_new_token_and_retry(self): # Create a MongoClient and an OIDC callback that implements the provider logic. 
client = self.create_client() @@ -909,7 +937,7 @@ def test_3_3_unexpected_error_code_does_not_clear_cache(self): # Assert that the callback has been called once. self.assertEqual(self.request_called, 1) - def test_4_1_reauthentication_succeds(self): + def test_4_1_reauthentication_succeeds(self): # Create a ``MongoClient`` configured with a custom OIDC callback that # implements the provider logic. client = self.create_client() From 5b00a3d48a9052b00680b71124ee4ef82358fef7 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Mon, 11 Nov 2024 09:34:05 -0600 Subject: [PATCH 098/182] PYTHON-4956 Generated config cleanup (#2000) --- .evergreen/generated_configs/variants.yml | 454 +++++++++++----------- .evergreen/scripts/configure-env.sh | 5 + .evergreen/scripts/generate_config.py | 188 +++++---- 3 files changed, 336 insertions(+), 311 deletions(-) diff --git a/.evergreen/generated_configs/variants.yml b/.evergreen/generated_configs/variants.yml index 9ee51899f4..928347f567 100644 --- a/.evergreen/generated_configs/variants.yml +++ b/.evergreen/generated_configs/variants.yml @@ -1,9 +1,9 @@ buildvariants: # Alternative hosts tests - - name: openssl-1.0.2-rhel7-py3.9 + - name: openssl-1.0.2-rhel7-python3.9 tasks: - name: .5.0 .standalone !.sync_async - display_name: OpenSSL 1.0.2 RHEL7 py3.9 + display_name: OpenSSL 1.0.2 RHEL7 Python3.9 run_on: - rhel79-small batchtime: 10080 @@ -48,57 +48,57 @@ buildvariants: SKIP_HATCH: "true" # Atlas connect tests - - name: atlas-connect-rhel8-py3.9 + - name: atlas-connect-rhel8-python3.9 tasks: - name: atlas-connect - display_name: Atlas connect RHEL8 py3.9 + display_name: Atlas connect RHEL8 Python3.9 run_on: - rhel87-small expansions: PYTHON_BINARY: /opt/python/3.9/bin/python3 - - name: atlas-connect-rhel8-py3.13 + - name: atlas-connect-rhel8-python3.13 tasks: - name: atlas-connect - display_name: Atlas connect RHEL8 py3.13 + display_name: Atlas connect RHEL8 Python3.13 run_on: - rhel87-small expansions: PYTHON_BINARY: /opt/python/3.13/bin/python3 # Atlas data lake tests - - name: atlas-data-lake-ubuntu-22-py3.9-auth-no-c + - name: atlas-data-lake-ubuntu-22-python3.9-auth-no-c tasks: - name: atlas-data-lake-tests - display_name: Atlas Data Lake Ubuntu-22 py3.9 Auth No C + display_name: Atlas Data Lake Ubuntu-22 Python3.9 Auth No C run_on: - ubuntu2204-small expansions: AUTH: auth NO_EXT: "1" PYTHON_BINARY: /opt/python/3.9/bin/python3 - - name: atlas-data-lake-ubuntu-22-py3.9-auth + - name: atlas-data-lake-ubuntu-22-python3.9-auth tasks: - name: atlas-data-lake-tests - display_name: Atlas Data Lake Ubuntu-22 py3.9 Auth + display_name: Atlas Data Lake Ubuntu-22 Python3.9 Auth run_on: - ubuntu2204-small expansions: AUTH: auth PYTHON_BINARY: /opt/python/3.9/bin/python3 - - name: atlas-data-lake-ubuntu-22-py3.13-auth-no-c + - name: atlas-data-lake-ubuntu-22-python3.13-auth-no-c tasks: - name: atlas-data-lake-tests - display_name: Atlas Data Lake Ubuntu-22 py3.13 Auth No C + display_name: Atlas Data Lake Ubuntu-22 Python3.13 Auth No C run_on: - ubuntu2204-small expansions: AUTH: auth NO_EXT: "1" PYTHON_BINARY: /opt/python/3.13/bin/python3 - - name: atlas-data-lake-ubuntu-22-py3.13-auth + - name: atlas-data-lake-ubuntu-22-python3.13-auth tasks: - name: atlas-data-lake-tests - display_name: Atlas Data Lake Ubuntu-22 py3.13 Auth + display_name: Atlas Data Lake Ubuntu-22 Python3.13 Auth run_on: - ubuntu2204-small expansions: @@ -106,7 +106,7 @@ buildvariants: PYTHON_BINARY: /opt/python/3.13/bin/python3 # Aws auth tests - - name: auth-aws-ubuntu-20-py3.9 + - name: 
auth-aws-ubuntu-20-python3.9 tasks: - name: aws-auth-test-4.4 - name: aws-auth-test-5.0 @@ -115,12 +115,12 @@ buildvariants: - name: aws-auth-test-8.0 - name: aws-auth-test-rapid - name: aws-auth-test-latest - display_name: Auth AWS Ubuntu-20 py3.9 + display_name: Auth AWS Ubuntu-20 Python3.9 run_on: - ubuntu2004-small expansions: PYTHON_BINARY: /opt/python/3.9/bin/python3 - - name: auth-aws-ubuntu-20-py3.13 + - name: auth-aws-ubuntu-20-python3.13 tasks: - name: aws-auth-test-4.4 - name: aws-auth-test-5.0 @@ -129,12 +129,12 @@ buildvariants: - name: aws-auth-test-8.0 - name: aws-auth-test-rapid - name: aws-auth-test-latest - display_name: Auth AWS Ubuntu-20 py3.13 + display_name: Auth AWS Ubuntu-20 Python3.13 run_on: - ubuntu2004-small expansions: PYTHON_BINARY: /opt/python/3.13/bin/python3 - - name: auth-aws-win64-py3.9 + - name: auth-aws-win64-python3.9 tasks: - name: aws-auth-test-4.4 - name: aws-auth-test-5.0 @@ -143,13 +143,13 @@ buildvariants: - name: aws-auth-test-8.0 - name: aws-auth-test-rapid - name: aws-auth-test-latest - display_name: Auth AWS Win64 py3.9 + display_name: Auth AWS Win64 Python3.9 run_on: - windows-64-vsMulti-small expansions: skip_ECS_auth_test: "true" PYTHON_BINARY: C:/python/Python39/python.exe - - name: auth-aws-win64-py3.13 + - name: auth-aws-win64-python3.13 tasks: - name: aws-auth-test-4.4 - name: aws-auth-test-5.0 @@ -158,13 +158,13 @@ buildvariants: - name: aws-auth-test-8.0 - name: aws-auth-test-rapid - name: aws-auth-test-latest - display_name: Auth AWS Win64 py3.13 + display_name: Auth AWS Win64 Python3.13 run_on: - windows-64-vsMulti-small expansions: skip_ECS_auth_test: "true" PYTHON_BINARY: C:/python/Python313/python.exe - - name: auth-aws-macos-py3.9 + - name: auth-aws-macos-python3.9 tasks: - name: aws-auth-test-4.4 - name: aws-auth-test-5.0 @@ -173,7 +173,7 @@ buildvariants: - name: aws-auth-test-8.0 - name: aws-auth-test-rapid - name: aws-auth-test-latest - display_name: Auth AWS macOS py3.9 + display_name: Auth AWS macOS Python3.9 run_on: - macos-14 expansions: @@ -181,7 +181,7 @@ buildvariants: skip_EC2_auth_test: "true" skip_web_identity_auth_test: "true" PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 - - name: auth-aws-macos-py3.13 + - name: auth-aws-macos-python3.13 tasks: - name: aws-auth-test-4.4 - name: aws-auth-test-5.0 @@ -190,7 +190,7 @@ buildvariants: - name: aws-auth-test-8.0 - name: aws-auth-test-rapid - name: aws-auth-test-latest - display_name: Auth AWS macOS py3.13 + display_name: Auth AWS macOS Python3.13 run_on: - macos-14 expansions: @@ -200,58 +200,58 @@ buildvariants: PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 # Compression tests - - name: compression-snappy-rhel8-py3.9-no-c + - name: compression-snappy-rhel8-python3.9-no-c tasks: - name: .standalone .noauth .nossl .sync_async - display_name: Compression snappy RHEL8 py3.9 No C + display_name: Compression snappy RHEL8 Python3.9 No C run_on: - rhel87-small expansions: COMPRESSORS: snappy NO_EXT: "1" PYTHON_BINARY: /opt/python/3.9/bin/python3 - - name: compression-snappy-rhel8-py3.10 + - name: compression-snappy-rhel8-python3.10 tasks: - name: .standalone .noauth .nossl .sync_async - display_name: Compression snappy RHEL8 py3.10 + display_name: Compression snappy RHEL8 Python3.10 run_on: - rhel87-small expansions: COMPRESSORS: snappy PYTHON_BINARY: /opt/python/3.10/bin/python3 - - name: compression-zlib-rhel8-py3.11-no-c + - name: compression-zlib-rhel8-python3.11-no-c tasks: - name: .standalone .noauth .nossl 
.sync_async - display_name: Compression zlib RHEL8 py3.11 No C + display_name: Compression zlib RHEL8 Python3.11 No C run_on: - rhel87-small expansions: COMPRESSORS: zlib NO_EXT: "1" PYTHON_BINARY: /opt/python/3.11/bin/python3 - - name: compression-zlib-rhel8-py3.12 + - name: compression-zlib-rhel8-python3.12 tasks: - name: .standalone .noauth .nossl .sync_async - display_name: Compression zlib RHEL8 py3.12 + display_name: Compression zlib RHEL8 Python3.12 run_on: - rhel87-small expansions: COMPRESSORS: zlib PYTHON_BINARY: /opt/python/3.12/bin/python3 - - name: compression-zstd-rhel8-py3.13-no-c + - name: compression-zstd-rhel8-python3.13-no-c tasks: - name: .standalone .noauth .nossl .sync_async !.4.0 - display_name: Compression zstd RHEL8 py3.13 No C + display_name: Compression zstd RHEL8 Python3.13 No C run_on: - rhel87-small expansions: COMPRESSORS: zstd NO_EXT: "1" PYTHON_BINARY: /opt/python/3.13/bin/python3 - - name: compression-zstd-rhel8-py3.9 + - name: compression-zstd-rhel8-python3.9 tasks: - name: .standalone .noauth .nossl .sync_async !.4.0 - display_name: Compression zstd RHEL8 py3.9 + display_name: Compression zstd RHEL8 Python3.9 run_on: - rhel87-small expansions: @@ -260,7 +260,7 @@ buildvariants: - name: compression-snappy-rhel8-pypy3.9 tasks: - name: .standalone .noauth .nossl .sync_async - display_name: Compression snappy RHEL8 pypy3.9 + display_name: Compression snappy RHEL8 PyPy3.9 run_on: - rhel87-small expansions: @@ -269,7 +269,7 @@ buildvariants: - name: compression-zlib-rhel8-pypy3.10 tasks: - name: .standalone .noauth .nossl .sync_async - display_name: Compression zlib RHEL8 pypy3.10 + display_name: Compression zlib RHEL8 PyPy3.10 run_on: - rhel87-small expansions: @@ -278,7 +278,7 @@ buildvariants: - name: compression-zstd-rhel8-pypy3.9 tasks: - name: .standalone .noauth .nossl .sync_async !.4.0 - display_name: Compression zstd RHEL8 pypy3.9 + display_name: Compression zstd RHEL8 PyPy3.9 run_on: - rhel87-small expansions: @@ -286,10 +286,10 @@ buildvariants: PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 # Disable test commands tests - - name: disable-test-commands-rhel8-py3.9 + - name: disable-test-commands-rhel8-python3.9 tasks: - name: .latest .sync_async - display_name: Disable test commands RHEL8 py3.9 + display_name: Disable test commands RHEL8 Python3.9 run_on: - rhel87-small expansions: @@ -299,22 +299,22 @@ buildvariants: PYTHON_BINARY: /opt/python/3.9/bin/python3 # Doctests tests - - name: doctests-rhel8-py3.9 + - name: doctests-rhel8-python3.9 tasks: - name: doctests - display_name: Doctests RHEL8 py3.9 + display_name: Doctests RHEL8 Python3.9 run_on: - rhel87-small expansions: PYTHON_BINARY: /opt/python/3.9/bin/python3 # Encryption tests - - name: encryption-rhel8-py3.9 + - name: encryption-rhel8-python3.9 tasks: - name: .sharded_cluster .auth .ssl .sync_async - name: .replica_set .noauth .ssl .sync_async - name: .standalone .noauth .nossl .sync_async - display_name: Encryption RHEL8 py3.9 + display_name: Encryption RHEL8 Python3.9 run_on: - rhel87-small batchtime: 10080 @@ -322,12 +322,12 @@ buildvariants: test_encryption: "true" PYTHON_BINARY: /opt/python/3.9/bin/python3 tags: [encryption_tag] - - name: encryption-rhel8-py3.13 + - name: encryption-rhel8-python3.13 tasks: - name: .sharded_cluster .auth .ssl .sync_async - name: .replica_set .noauth .ssl .sync_async - name: .standalone .noauth .nossl .sync_async - display_name: Encryption RHEL8 py3.13 + display_name: Encryption RHEL8 Python3.13 run_on: - rhel87-small batchtime: 10080 @@ -340,7 +340,7 
@@ buildvariants: - name: .sharded_cluster .auth .ssl .sync_async - name: .replica_set .noauth .ssl .sync_async - name: .standalone .noauth .nossl .sync_async - display_name: Encryption RHEL8 pypy3.10 + display_name: Encryption RHEL8 PyPy3.10 run_on: - rhel87-small batchtime: 10080 @@ -348,12 +348,12 @@ buildvariants: test_encryption: "true" PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 tags: [encryption_tag] - - name: encryption-crypt_shared-rhel8-py3.9 + - name: encryption-crypt_shared-rhel8-python3.9 tasks: - name: .sharded_cluster .auth .ssl .sync_async - name: .replica_set .noauth .ssl .sync_async - name: .standalone .noauth .nossl .sync_async - display_name: Encryption crypt_shared RHEL8 py3.9 + display_name: Encryption crypt_shared RHEL8 Python3.9 run_on: - rhel87-small batchtime: 10080 @@ -362,12 +362,12 @@ buildvariants: test_crypt_shared: "true" PYTHON_BINARY: /opt/python/3.9/bin/python3 tags: [encryption_tag] - - name: encryption-crypt_shared-rhel8-py3.13 + - name: encryption-crypt_shared-rhel8-python3.13 tasks: - name: .sharded_cluster .auth .ssl .sync_async - name: .replica_set .noauth .ssl .sync_async - name: .standalone .noauth .nossl .sync_async - display_name: Encryption crypt_shared RHEL8 py3.13 + display_name: Encryption crypt_shared RHEL8 Python3.13 run_on: - rhel87-small batchtime: 10080 @@ -381,7 +381,7 @@ buildvariants: - name: .sharded_cluster .auth .ssl .sync_async - name: .replica_set .noauth .ssl .sync_async - name: .standalone .noauth .nossl .sync_async - display_name: Encryption crypt_shared RHEL8 pypy3.10 + display_name: Encryption crypt_shared RHEL8 PyPy3.10 run_on: - rhel87-small batchtime: 10080 @@ -390,12 +390,12 @@ buildvariants: test_crypt_shared: "true" PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 tags: [encryption_tag] - - name: encryption-pyopenssl-rhel8-py3.9 + - name: encryption-pyopenssl-rhel8-python3.9 tasks: - name: .sharded_cluster .auth .ssl .sync_async - name: .replica_set .noauth .ssl .sync_async - name: .standalone .noauth .nossl .sync_async - display_name: Encryption PyOpenSSL RHEL8 py3.9 + display_name: Encryption PyOpenSSL RHEL8 Python3.9 run_on: - rhel87-small batchtime: 10080 @@ -404,12 +404,12 @@ buildvariants: test_encryption_pyopenssl: "true" PYTHON_BINARY: /opt/python/3.9/bin/python3 tags: [encryption_tag] - - name: encryption-pyopenssl-rhel8-py3.13 + - name: encryption-pyopenssl-rhel8-python3.13 tasks: - name: .sharded_cluster .auth .ssl .sync_async - name: .replica_set .noauth .ssl .sync_async - name: .standalone .noauth .nossl .sync_async - display_name: Encryption PyOpenSSL RHEL8 py3.13 + display_name: Encryption PyOpenSSL RHEL8 Python3.13 run_on: - rhel87-small batchtime: 10080 @@ -423,7 +423,7 @@ buildvariants: - name: .sharded_cluster .auth .ssl .sync_async - name: .replica_set .noauth .ssl .sync_async - name: .standalone .noauth .nossl .sync_async - display_name: Encryption PyOpenSSL RHEL8 pypy3.10 + display_name: Encryption PyOpenSSL RHEL8 PyPy3.10 run_on: - rhel87-small batchtime: 10080 @@ -432,29 +432,29 @@ buildvariants: test_encryption_pyopenssl: "true" PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 tags: [encryption_tag] - - name: encryption-rhel8-py3.10 + - name: encryption-rhel8-python3.10 tasks: - name: .sharded_cluster .auth .ssl .sync_async - display_name: Encryption RHEL8 py3.10 + display_name: Encryption RHEL8 Python3.10 run_on: - rhel87-small expansions: test_encryption: "true" PYTHON_BINARY: /opt/python/3.10/bin/python3 - - name: encryption-crypt_shared-rhel8-py3.11 + - name: 
encryption-crypt_shared-rhel8-python3.11 tasks: - name: .replica_set .noauth .ssl .sync_async - display_name: Encryption crypt_shared RHEL8 py3.11 + display_name: Encryption crypt_shared RHEL8 Python3.11 run_on: - rhel87-small expansions: test_encryption: "true" test_crypt_shared: "true" PYTHON_BINARY: /opt/python/3.11/bin/python3 - - name: encryption-pyopenssl-rhel8-py3.12 + - name: encryption-pyopenssl-rhel8-python3.12 tasks: - name: .standalone .noauth .nossl .sync_async - display_name: Encryption PyOpenSSL RHEL8 py3.12 + display_name: Encryption PyOpenSSL RHEL8 Python3.12 run_on: - rhel87-small expansions: @@ -464,16 +464,16 @@ buildvariants: - name: encryption-rhel8-pypy3.9 tasks: - name: .sharded_cluster .auth .ssl .sync_async - display_name: Encryption RHEL8 pypy3.9 + display_name: Encryption RHEL8 PyPy3.9 run_on: - rhel87-small expansions: test_encryption: "true" PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 - - name: encryption-macos-py3.9 + - name: encryption-macos-python3.9 tasks: - name: .latest .replica_set .sync_async - display_name: Encryption macOS py3.9 + display_name: Encryption macOS Python3.9 run_on: - macos-14 batchtime: 10080 @@ -481,10 +481,10 @@ buildvariants: test_encryption: "true" PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 tags: [encryption_tag] - - name: encryption-macos-py3.13 + - name: encryption-macos-python3.13 tasks: - name: .latest .replica_set .sync_async - display_name: Encryption macOS py3.13 + display_name: Encryption macOS Python3.13 run_on: - macos-14 batchtime: 10080 @@ -492,10 +492,10 @@ buildvariants: test_encryption: "true" PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 tags: [encryption_tag] - - name: encryption-crypt_shared-macos-py3.9 + - name: encryption-crypt_shared-macos-python3.9 tasks: - name: .latest .replica_set .sync_async - display_name: Encryption crypt_shared macOS py3.9 + display_name: Encryption crypt_shared macOS Python3.9 run_on: - macos-14 batchtime: 10080 @@ -504,10 +504,10 @@ buildvariants: test_crypt_shared: "true" PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 tags: [encryption_tag] - - name: encryption-crypt_shared-macos-py3.13 + - name: encryption-crypt_shared-macos-python3.13 tasks: - name: .latest .replica_set .sync_async - display_name: Encryption crypt_shared macOS py3.13 + display_name: Encryption crypt_shared macOS Python3.13 run_on: - macos-14 batchtime: 10080 @@ -516,10 +516,10 @@ buildvariants: test_crypt_shared: "true" PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 tags: [encryption_tag] - - name: encryption-win64-py3.9 + - name: encryption-win64-python3.9 tasks: - name: .latest .replica_set .sync_async - display_name: Encryption Win64 py3.9 + display_name: Encryption Win64 Python3.9 run_on: - windows-64-vsMulti-small batchtime: 10080 @@ -527,10 +527,10 @@ buildvariants: test_encryption: "true" PYTHON_BINARY: C:/python/Python39/python.exe tags: [encryption_tag] - - name: encryption-win64-py3.13 + - name: encryption-win64-python3.13 tasks: - name: .latest .replica_set .sync_async - display_name: Encryption Win64 py3.13 + display_name: Encryption Win64 Python3.13 run_on: - windows-64-vsMulti-small batchtime: 10080 @@ -538,10 +538,10 @@ buildvariants: test_encryption: "true" PYTHON_BINARY: C:/python/Python313/python.exe tags: [encryption_tag] - - name: encryption-crypt_shared-win64-py3.9 + - name: encryption-crypt_shared-win64-python3.9 tasks: - name: .latest .replica_set .sync_async - 
display_name: Encryption crypt_shared Win64 py3.9 + display_name: Encryption crypt_shared Win64 Python3.9 run_on: - windows-64-vsMulti-small batchtime: 10080 @@ -550,10 +550,10 @@ buildvariants: test_crypt_shared: "true" PYTHON_BINARY: C:/python/Python39/python.exe tags: [encryption_tag] - - name: encryption-crypt_shared-win64-py3.13 + - name: encryption-crypt_shared-win64-python3.13 tasks: - name: .latest .replica_set .sync_async - display_name: Encryption crypt_shared Win64 py3.13 + display_name: Encryption crypt_shared Win64 Python3.13 run_on: - windows-64-vsMulti-small batchtime: 10080 @@ -564,46 +564,46 @@ buildvariants: tags: [encryption_tag] # Enterprise auth tests - - name: auth-enterprise-macos-py3.9-auth + - name: auth-enterprise-macos-python3.9-auth tasks: - name: test-enterprise-auth - display_name: Auth Enterprise macOS py3.9 Auth + display_name: Auth Enterprise macOS Python3.9 Auth run_on: - macos-14 expansions: AUTH: auth PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 - - name: auth-enterprise-rhel8-py3.10-auth + - name: auth-enterprise-rhel8-python3.10-auth tasks: - name: test-enterprise-auth - display_name: Auth Enterprise RHEL8 py3.10 Auth + display_name: Auth Enterprise RHEL8 Python3.10 Auth run_on: - rhel87-small expansions: AUTH: auth PYTHON_BINARY: /opt/python/3.10/bin/python3 - - name: auth-enterprise-rhel8-py3.11-auth + - name: auth-enterprise-rhel8-python3.11-auth tasks: - name: test-enterprise-auth - display_name: Auth Enterprise RHEL8 py3.11 Auth + display_name: Auth Enterprise RHEL8 Python3.11 Auth run_on: - rhel87-small expansions: AUTH: auth PYTHON_BINARY: /opt/python/3.11/bin/python3 - - name: auth-enterprise-rhel8-py3.12-auth + - name: auth-enterprise-rhel8-python3.12-auth tasks: - name: test-enterprise-auth - display_name: Auth Enterprise RHEL8 py3.12 Auth + display_name: Auth Enterprise RHEL8 Python3.12 Auth run_on: - rhel87-small expansions: AUTH: auth PYTHON_BINARY: /opt/python/3.12/bin/python3 - - name: auth-enterprise-win64-py3.13-auth + - name: auth-enterprise-win64-python3.13-auth tasks: - name: test-enterprise-auth - display_name: Auth Enterprise Win64 py3.13 Auth + display_name: Auth Enterprise Win64 Python3.13 Auth run_on: - windows-64-vsMulti-small expansions: @@ -612,7 +612,7 @@ buildvariants: - name: auth-enterprise-rhel8-pypy3.9-auth tasks: - name: test-enterprise-auth - display_name: Auth Enterprise RHEL8 pypy3.9 Auth + display_name: Auth Enterprise RHEL8 PyPy3.9 Auth run_on: - rhel87-small expansions: @@ -621,7 +621,7 @@ buildvariants: - name: auth-enterprise-rhel8-pypy3.10-auth tasks: - name: test-enterprise-auth - display_name: Auth Enterprise RHEL8 pypy3.10 Auth + display_name: Auth Enterprise RHEL8 PyPy3.10 Auth run_on: - rhel87-small expansions: @@ -629,10 +629,10 @@ buildvariants: PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 # Green framework tests - - name: green-eventlet-rhel8-py3.9 + - name: green-eventlet-rhel8-python3.9 tasks: - name: .standalone .noauth .nossl .sync_async - display_name: Green Eventlet RHEL8 py3.9 + display_name: Green Eventlet RHEL8 Python3.9 run_on: - rhel87-small expansions: @@ -640,10 +640,10 @@ buildvariants: AUTH: auth SSL: ssl PYTHON_BINARY: /opt/python/3.9/bin/python3 - - name: green-gevent-rhel8-py3.9 + - name: green-gevent-rhel8-python3.9 tasks: - name: .standalone .noauth .nossl .sync_async - display_name: Green Gevent RHEL8 py3.9 + display_name: Green Gevent RHEL8 Python3.9 run_on: - rhel87-small expansions: @@ -651,10 +651,10 @@ buildvariants: AUTH: auth SSL: ssl 
PYTHON_BINARY: /opt/python/3.9/bin/python3 - - name: green-eventlet-rhel8-py3.12 + - name: green-eventlet-rhel8-python3.12 tasks: - name: .standalone .noauth .nossl .sync_async - display_name: Green Eventlet RHEL8 py3.12 + display_name: Green Eventlet RHEL8 Python3.12 run_on: - rhel87-small expansions: @@ -662,10 +662,10 @@ buildvariants: AUTH: auth SSL: ssl PYTHON_BINARY: /opt/python/3.12/bin/python3 - - name: green-gevent-rhel8-py3.12 + - name: green-gevent-rhel8-python3.12 tasks: - name: .standalone .noauth .nossl .sync_async - display_name: Green Gevent RHEL8 py3.12 + display_name: Green Gevent RHEL8 Python3.12 run_on: - rhel87-small expansions: @@ -675,87 +675,87 @@ buildvariants: PYTHON_BINARY: /opt/python/3.12/bin/python3 # Load balancer tests - - name: load-balancer-rhel8-v6.0-py3.9 + - name: load-balancer-rhel8-v6.0-python3.9 tasks: - name: .load-balancer - display_name: Load Balancer RHEL8 v6.0 py3.9 + display_name: Load Balancer RHEL8 v6.0 Python3.9 run_on: - rhel87-small batchtime: 10080 expansions: - PYTHON_BINARY: /opt/python/3.9/bin/python3 VERSION: "6.0" - - name: load-balancer-rhel8-v7.0-py3.9 + PYTHON_BINARY: /opt/python/3.9/bin/python3 + - name: load-balancer-rhel8-v7.0-python3.9 tasks: - name: .load-balancer - display_name: Load Balancer RHEL8 v7.0 py3.9 + display_name: Load Balancer RHEL8 v7.0 Python3.9 run_on: - rhel87-small batchtime: 10080 expansions: - PYTHON_BINARY: /opt/python/3.9/bin/python3 VERSION: "7.0" - - name: load-balancer-rhel8-v8.0-py3.9 + PYTHON_BINARY: /opt/python/3.9/bin/python3 + - name: load-balancer-rhel8-v8.0-python3.9 tasks: - name: .load-balancer - display_name: Load Balancer RHEL8 v8.0 py3.9 + display_name: Load Balancer RHEL8 v8.0 Python3.9 run_on: - rhel87-small batchtime: 10080 expansions: - PYTHON_BINARY: /opt/python/3.9/bin/python3 VERSION: "8.0" - - name: load-balancer-rhel8-rapid-py3.9 + PYTHON_BINARY: /opt/python/3.9/bin/python3 + - name: load-balancer-rhel8-rapid-python3.9 tasks: - name: .load-balancer - display_name: Load Balancer RHEL8 rapid py3.9 + display_name: Load Balancer RHEL8 rapid Python3.9 run_on: - rhel87-small batchtime: 10080 expansions: - PYTHON_BINARY: /opt/python/3.9/bin/python3 VERSION: rapid - - name: load-balancer-rhel8-latest-py3.9 + PYTHON_BINARY: /opt/python/3.9/bin/python3 + - name: load-balancer-rhel8-latest-python3.9 tasks: - name: .load-balancer - display_name: Load Balancer RHEL8 latest py3.9 + display_name: Load Balancer RHEL8 latest Python3.9 run_on: - rhel87-small batchtime: 10080 expansions: - PYTHON_BINARY: /opt/python/3.9/bin/python3 VERSION: latest + PYTHON_BINARY: /opt/python/3.9/bin/python3 # Mockupdb tests - - name: mockupdb-rhel8-py3.9 + - name: mockupdb-rhel8-python3.9 tasks: - name: mockupdb - display_name: MockupDB RHEL8 py3.9 + display_name: MockupDB RHEL8 Python3.9 run_on: - rhel87-small expansions: PYTHON_BINARY: /opt/python/3.9/bin/python3 # Mod wsgi tests - - name: mod_wsgi-ubuntu-22-py3.9 + - name: mod_wsgi-ubuntu-22-python3.9 tasks: - name: mod-wsgi-standalone - name: mod-wsgi-replica-set - name: mod-wsgi-embedded-mode-standalone - name: mod-wsgi-embedded-mode-replica-set - display_name: mod_wsgi Ubuntu-22 py3.9 + display_name: mod_wsgi Ubuntu-22 Python3.9 run_on: - ubuntu2204-small expansions: MOD_WSGI_VERSION: "4" PYTHON_BINARY: /opt/python/3.9/bin/python3 - - name: mod_wsgi-ubuntu-22-py3.13 + - name: mod_wsgi-ubuntu-22-python3.13 tasks: - name: mod-wsgi-standalone - name: mod-wsgi-replica-set - name: mod-wsgi-embedded-mode-standalone - name: mod-wsgi-embedded-mode-replica-set - 
display_name: mod_wsgi Ubuntu-22 py3.13 + display_name: mod_wsgi Ubuntu-22 Python3.13 run_on: - ubuntu2204-small expansions: @@ -763,46 +763,46 @@ buildvariants: PYTHON_BINARY: /opt/python/3.13/bin/python3 # No c ext tests - - name: no-c-ext-rhel8-py3.9 + - name: no-c-ext-rhel8-python3.9 tasks: - name: .standalone .noauth .nossl .sync_async - display_name: No C Ext RHEL8 py3.9 + display_name: No C Ext RHEL8 Python3.9 run_on: - rhel87-small expansions: NO_EXT: "1" PYTHON_BINARY: /opt/python/3.9/bin/python3 - - name: no-c-ext-rhel8-py3.10 + - name: no-c-ext-rhel8-python3.10 tasks: - name: .replica_set .noauth .nossl .sync_async - display_name: No C Ext RHEL8 py3.10 + display_name: No C Ext RHEL8 Python3.10 run_on: - rhel87-small expansions: NO_EXT: "1" PYTHON_BINARY: /opt/python/3.10/bin/python3 - - name: no-c-ext-rhel8-py3.11 + - name: no-c-ext-rhel8-python3.11 tasks: - name: .sharded_cluster .noauth .nossl .sync_async - display_name: No C Ext RHEL8 py3.11 + display_name: No C Ext RHEL8 Python3.11 run_on: - rhel87-small expansions: NO_EXT: "1" PYTHON_BINARY: /opt/python/3.11/bin/python3 - - name: no-c-ext-rhel8-py3.12 + - name: no-c-ext-rhel8-python3.12 tasks: - name: .standalone .noauth .nossl .sync_async - display_name: No C Ext RHEL8 py3.12 + display_name: No C Ext RHEL8 Python3.12 run_on: - rhel87-small expansions: NO_EXT: "1" PYTHON_BINARY: /opt/python/3.12/bin/python3 - - name: no-c-ext-rhel8-py3.13 + - name: no-c-ext-rhel8-python3.13 tasks: - name: .replica_set .noauth .nossl .sync_async - display_name: No C Ext RHEL8 py3.13 + display_name: No C Ext RHEL8 Python3.13 run_on: - rhel87-small expansions: @@ -810,10 +810,10 @@ buildvariants: PYTHON_BINARY: /opt/python/3.13/bin/python3 # Ocsp tests - - name: ocsp-rhel8-v4.4-py3.9 + - name: ocsp-rhel8-v4.4-python3.9 tasks: - name: .ocsp - display_name: OCSP RHEL8 v4.4 py3.9 + display_name: OCSP RHEL8 v4.4 Python3.9 run_on: - rhel87-small batchtime: 20160 @@ -821,12 +821,12 @@ buildvariants: AUTH: noauth SSL: ssl TOPOLOGY: server - PYTHON_BINARY: /opt/python/3.9/bin/python3 VERSION: "4.4" - - name: ocsp-rhel8-v5.0-py3.10 + PYTHON_BINARY: /opt/python/3.9/bin/python3 + - name: ocsp-rhel8-v5.0-python3.10 tasks: - name: .ocsp - display_name: OCSP RHEL8 v5.0 py3.10 + display_name: OCSP RHEL8 v5.0 Python3.10 run_on: - rhel87-small batchtime: 20160 @@ -834,12 +834,12 @@ buildvariants: AUTH: noauth SSL: ssl TOPOLOGY: server - PYTHON_BINARY: /opt/python/3.10/bin/python3 VERSION: "5.0" - - name: ocsp-rhel8-v6.0-py3.11 + PYTHON_BINARY: /opt/python/3.10/bin/python3 + - name: ocsp-rhel8-v6.0-python3.11 tasks: - name: .ocsp - display_name: OCSP RHEL8 v6.0 py3.11 + display_name: OCSP RHEL8 v6.0 Python3.11 run_on: - rhel87-small batchtime: 20160 @@ -847,12 +847,12 @@ buildvariants: AUTH: noauth SSL: ssl TOPOLOGY: server - PYTHON_BINARY: /opt/python/3.11/bin/python3 VERSION: "6.0" - - name: ocsp-rhel8-v7.0-py3.12 + PYTHON_BINARY: /opt/python/3.11/bin/python3 + - name: ocsp-rhel8-v7.0-python3.12 tasks: - name: .ocsp - display_name: OCSP RHEL8 v7.0 py3.12 + display_name: OCSP RHEL8 v7.0 Python3.12 run_on: - rhel87-small batchtime: 20160 @@ -860,12 +860,12 @@ buildvariants: AUTH: noauth SSL: ssl TOPOLOGY: server - PYTHON_BINARY: /opt/python/3.12/bin/python3 VERSION: "7.0" - - name: ocsp-rhel8-v8.0-py3.13 + PYTHON_BINARY: /opt/python/3.12/bin/python3 + - name: ocsp-rhel8-v8.0-python3.13 tasks: - name: .ocsp - display_name: OCSP RHEL8 v8.0 py3.13 + display_name: OCSP RHEL8 v8.0 Python3.13 run_on: - rhel87-small batchtime: 20160 @@ -873,12 +873,12 @@ 
buildvariants: AUTH: noauth SSL: ssl TOPOLOGY: server - PYTHON_BINARY: /opt/python/3.13/bin/python3 VERSION: "8.0" + PYTHON_BINARY: /opt/python/3.13/bin/python3 - name: ocsp-rhel8-rapid-pypy3.9 tasks: - name: .ocsp - display_name: OCSP RHEL8 rapid pypy3.9 + display_name: OCSP RHEL8 rapid PyPy3.9 run_on: - rhel87-small batchtime: 20160 @@ -886,12 +886,12 @@ buildvariants: AUTH: noauth SSL: ssl TOPOLOGY: server - PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 VERSION: rapid + PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 - name: ocsp-rhel8-latest-pypy3.10 tasks: - name: .ocsp - display_name: OCSP RHEL8 latest pypy3.10 + display_name: OCSP RHEL8 latest PyPy3.10 run_on: - rhel87-small batchtime: 20160 @@ -899,12 +899,12 @@ buildvariants: AUTH: noauth SSL: ssl TOPOLOGY: server - PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 VERSION: latest - - name: ocsp-win64-v4.4-py3.9 + PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 + - name: ocsp-win64-v4.4-python3.9 tasks: - name: .ocsp-rsa !.ocsp-staple - display_name: OCSP Win64 v4.4 py3.9 + display_name: OCSP Win64 v4.4 Python3.9 run_on: - windows-64-vsMulti-small batchtime: 20160 @@ -912,12 +912,12 @@ buildvariants: AUTH: noauth SSL: ssl TOPOLOGY: server - PYTHON_BINARY: C:/python/Python39/python.exe VERSION: "4.4" - - name: ocsp-win64-v8.0-py3.13 + PYTHON_BINARY: C:/python/Python39/python.exe + - name: ocsp-win64-v8.0-python3.13 tasks: - name: .ocsp-rsa !.ocsp-staple - display_name: OCSP Win64 v8.0 py3.13 + display_name: OCSP Win64 v8.0 Python3.13 run_on: - windows-64-vsMulti-small batchtime: 20160 @@ -925,12 +925,12 @@ buildvariants: AUTH: noauth SSL: ssl TOPOLOGY: server - PYTHON_BINARY: C:/python/Python313/python.exe VERSION: "8.0" - - name: ocsp-macos-v4.4-py3.9 + PYTHON_BINARY: C:/python/Python313/python.exe + - name: ocsp-macos-v4.4-python3.9 tasks: - name: .ocsp-rsa !.ocsp-staple - display_name: OCSP macOS v4.4 py3.9 + display_name: OCSP macOS v4.4 Python3.9 run_on: - macos-14 batchtime: 20160 @@ -938,12 +938,12 @@ buildvariants: AUTH: noauth SSL: ssl TOPOLOGY: server - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 VERSION: "4.4" - - name: ocsp-macos-v8.0-py3.13 + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 + - name: ocsp-macos-v8.0-python3.13 tasks: - name: .ocsp-rsa !.ocsp-staple - display_name: OCSP macOS v8.0 py3.13 + display_name: OCSP macOS v8.0 Python3.13 run_on: - macos-14 batchtime: 20160 @@ -951,8 +951,8 @@ buildvariants: AUTH: noauth SSL: ssl TOPOLOGY: server - PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 VERSION: "8.0" + PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 # Oidc auth tests - name: auth-oidc-ubuntu-22 @@ -981,55 +981,55 @@ buildvariants: batchtime: 20160 # Pyopenssl tests - - name: pyopenssl-macos-py3.9 + - name: pyopenssl-macos-python3.9 tasks: - name: .replica_set .noauth .nossl .sync_async - name: .7.0 .noauth .nossl .sync_async - display_name: PyOpenSSL macOS py3.9 + display_name: PyOpenSSL macOS Python3.9 run_on: - macos-14 batchtime: 10080 expansions: test_pyopenssl: "true" PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 - - name: pyopenssl-rhel8-py3.10 + - name: pyopenssl-rhel8-python3.10 tasks: - name: .replica_set .auth .ssl .sync_async - name: .7.0 .auth .ssl .sync_async - display_name: PyOpenSSL RHEL8 py3.10 + display_name: PyOpenSSL RHEL8 Python3.10 run_on: - rhel87-small batchtime: 10080 expansions: test_pyopenssl: "true" PYTHON_BINARY: 
/opt/python/3.10/bin/python3 - - name: pyopenssl-rhel8-py3.11 + - name: pyopenssl-rhel8-python3.11 tasks: - name: .replica_set .auth .ssl .sync_async - name: .7.0 .auth .ssl .sync_async - display_name: PyOpenSSL RHEL8 py3.11 + display_name: PyOpenSSL RHEL8 Python3.11 run_on: - rhel87-small batchtime: 10080 expansions: test_pyopenssl: "true" PYTHON_BINARY: /opt/python/3.11/bin/python3 - - name: pyopenssl-rhel8-py3.12 + - name: pyopenssl-rhel8-python3.12 tasks: - name: .replica_set .auth .ssl .sync_async - name: .7.0 .auth .ssl .sync_async - display_name: PyOpenSSL RHEL8 py3.12 + display_name: PyOpenSSL RHEL8 Python3.12 run_on: - rhel87-small batchtime: 10080 expansions: test_pyopenssl: "true" PYTHON_BINARY: /opt/python/3.12/bin/python3 - - name: pyopenssl-win64-py3.13 + - name: pyopenssl-win64-python3.13 tasks: - name: .replica_set .auth .ssl .sync_async - name: .7.0 .auth .ssl .sync_async - display_name: PyOpenSSL Win64 py3.13 + display_name: PyOpenSSL Win64 Python3.13 run_on: - windows-64-vsMulti-small batchtime: 10080 @@ -1040,7 +1040,7 @@ buildvariants: tasks: - name: .replica_set .auth .ssl .sync_async - name: .7.0 .auth .ssl .sync_async - display_name: PyOpenSSL RHEL8 pypy3.9 + display_name: PyOpenSSL RHEL8 PyPy3.9 run_on: - rhel87-small batchtime: 10080 @@ -1051,7 +1051,7 @@ buildvariants: tasks: - name: .replica_set .auth .ssl .sync_async - name: .7.0 .auth .ssl .sync_async - display_name: PyOpenSSL RHEL8 pypy3.10 + display_name: PyOpenSSL RHEL8 PyPy3.10 run_on: - rhel87-small batchtime: 10080 @@ -1060,34 +1060,34 @@ buildvariants: PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 # Search index tests - - name: search-index-helpers-rhel8-py3.9 + - name: search-index-helpers-rhel8-python3.9 tasks: - name: test_atlas_task_group_search_indexes - display_name: Search Index Helpers RHEL8 py3.9 + display_name: Search Index Helpers RHEL8 Python3.9 run_on: - rhel87-small expansions: PYTHON_BINARY: /opt/python/3.9/bin/python3 # Server tests - - name: test-rhel8-py3.9-cov + - name: test-rhel8-python3.9-cov tasks: - name: .standalone .sync_async - name: .replica_set .sync_async - name: .sharded_cluster .sync_async - display_name: "* Test RHEL8 py3.9 cov" + display_name: "* Test RHEL8 Python3.9 cov" run_on: - rhel87-small expansions: COVERAGE: coverage PYTHON_BINARY: /opt/python/3.9/bin/python3 tags: [coverage_tag] - - name: test-rhel8-py3.13-cov + - name: test-rhel8-python3.13-cov tasks: - name: .standalone .sync_async - name: .replica_set .sync_async - name: .sharded_cluster .sync_async - display_name: "* Test RHEL8 py3.13 cov" + display_name: "* Test RHEL8 Python3.13 cov" run_on: - rhel87-small expansions: @@ -1099,41 +1099,41 @@ buildvariants: - name: .standalone .sync_async - name: .replica_set .sync_async - name: .sharded_cluster .sync_async - display_name: "* Test RHEL8 pypy3.10 cov" + display_name: "* Test RHEL8 PyPy3.10 cov" run_on: - rhel87-small expansions: COVERAGE: coverage PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 tags: [coverage_tag] - - name: test-rhel8-py3.10 + - name: test-rhel8-python3.10 tasks: - name: .sharded_cluster .auth .ssl .sync_async - name: .replica_set .noauth .ssl .sync_async - name: .standalone .noauth .nossl .sync_async - display_name: "* Test RHEL8 py3.10" + display_name: "* Test RHEL8 Python3.10" run_on: - rhel87-small expansions: COVERAGE: coverage PYTHON_BINARY: /opt/python/3.10/bin/python3 - - name: test-rhel8-py3.11 + - name: test-rhel8-python3.11 tasks: - name: .sharded_cluster .auth .ssl .sync_async - name: .replica_set .noauth .ssl .sync_async - 
name: .standalone .noauth .nossl .sync_async - display_name: "* Test RHEL8 py3.11" + display_name: "* Test RHEL8 Python3.11" run_on: - rhel87-small expansions: COVERAGE: coverage PYTHON_BINARY: /opt/python/3.11/bin/python3 - - name: test-rhel8-py3.12 + - name: test-rhel8-python3.12 tasks: - name: .sharded_cluster .auth .ssl .sync_async - name: .replica_set .noauth .ssl .sync_async - name: .standalone .noauth .nossl .sync_async - display_name: "* Test RHEL8 py3.12" + display_name: "* Test RHEL8 Python3.12" run_on: - rhel87-small expansions: @@ -1144,35 +1144,33 @@ buildvariants: - name: .sharded_cluster .auth .ssl .sync_async - name: .replica_set .noauth .ssl .sync_async - name: .standalone .noauth .nossl .sync_async - display_name: "* Test RHEL8 pypy3.9" + display_name: "* Test RHEL8 PyPy3.9" run_on: - rhel87-small expansions: COVERAGE: coverage PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 - - name: test-macos-py3.9 + - name: test-macos-python3.9 tasks: - name: .sharded_cluster .auth .ssl !.sync_async - name: .replica_set .noauth .ssl !.sync_async - name: .standalone .noauth .nossl !.sync_async - display_name: "* Test macOS py3.9" + display_name: "* Test macOS Python3.9" run_on: - macos-14 expansions: - SKIP_CSOT_TESTS: "true" PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 - - name: test-macos-py3.13 + - name: test-macos-python3.13 tasks: - name: .sharded_cluster .auth .ssl !.sync_async - name: .replica_set .noauth .ssl !.sync_async - name: .standalone .noauth .nossl !.sync_async - display_name: "* Test macOS py3.13" + display_name: "* Test macOS Python3.13" run_on: - macos-14 expansions: - SKIP_CSOT_TESTS: "true" PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 - - name: test-macos-arm64-py3.9 + - name: test-macos-arm64-python3.9 tasks: - name: .sharded_cluster .auth .ssl .6.0 !.sync_async - name: .replica_set .noauth .ssl .6.0 !.sync_async @@ -1189,13 +1187,12 @@ buildvariants: - name: .sharded_cluster .auth .ssl .latest !.sync_async - name: .replica_set .noauth .ssl .latest !.sync_async - name: .standalone .noauth .nossl .latest !.sync_async - display_name: "* Test macOS Arm64 py3.9" + display_name: "* Test macOS Arm64 Python3.9" run_on: - macos-14-arm64 expansions: - SKIP_CSOT_TESTS: "true" PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.9/bin/python3 - - name: test-macos-arm64-py3.13 + - name: test-macos-arm64-python3.13 tasks: - name: .sharded_cluster .auth .ssl .6.0 !.sync_async - name: .replica_set .noauth .ssl .6.0 !.sync_async @@ -1212,62 +1209,57 @@ buildvariants: - name: .sharded_cluster .auth .ssl .latest !.sync_async - name: .replica_set .noauth .ssl .latest !.sync_async - name: .standalone .noauth .nossl .latest !.sync_async - display_name: "* Test macOS Arm64 py3.13" + display_name: "* Test macOS Arm64 Python3.13" run_on: - macos-14-arm64 expansions: - SKIP_CSOT_TESTS: "true" PYTHON_BINARY: /Library/Frameworks/Python.Framework/Versions/3.13/bin/python3 - - name: test-win64-py3.9 + - name: test-win64-python3.9 tasks: - name: .sharded_cluster .auth .ssl !.sync_async - name: .replica_set .noauth .ssl !.sync_async - name: .standalone .noauth .nossl !.sync_async - display_name: "* Test Win64 py3.9" + display_name: "* Test Win64 Python3.9" run_on: - windows-64-vsMulti-small expansions: - SKIP_CSOT_TESTS: "true" PYTHON_BINARY: C:/python/Python39/python.exe - - name: test-win64-py3.13 + - name: test-win64-python3.13 tasks: - name: .sharded_cluster .auth .ssl !.sync_async - name: .replica_set .noauth .ssl 
!.sync_async - name: .standalone .noauth .nossl !.sync_async - display_name: "* Test Win64 py3.13" + display_name: "* Test Win64 Python3.13" run_on: - windows-64-vsMulti-small expansions: - SKIP_CSOT_TESTS: "true" PYTHON_BINARY: C:/python/Python313/python.exe - - name: test-win32-py3.9 + - name: test-win32-python3.9 tasks: - name: .sharded_cluster .auth .ssl !.sync_async - name: .replica_set .noauth .ssl !.sync_async - name: .standalone .noauth .nossl !.sync_async - display_name: "* Test Win32 py3.9" + display_name: "* Test Win32 Python3.9" run_on: - windows-64-vsMulti-small expansions: - SKIP_CSOT_TESTS: "true" PYTHON_BINARY: C:/python/32/Python39/python.exe - - name: test-win32-py3.13 + - name: test-win32-python3.13 tasks: - name: .sharded_cluster .auth .ssl !.sync_async - name: .replica_set .noauth .ssl !.sync_async - name: .standalone .noauth .nossl !.sync_async - display_name: "* Test Win32 py3.13" + display_name: "* Test Win32 Python3.13" run_on: - windows-64-vsMulti-small expansions: - SKIP_CSOT_TESTS: "true" PYTHON_BINARY: C:/python/32/Python313/python.exe # Serverless tests - - name: serverless-rhel8-py3.9 + - name: serverless-rhel8-python3.9 tasks: - name: serverless_task_group - display_name: Serverless RHEL8 py3.9 + display_name: Serverless RHEL8 Python3.9 run_on: - rhel87-small batchtime: 10080 @@ -1276,10 +1268,10 @@ buildvariants: AUTH: auth SSL: ssl PYTHON_BINARY: /opt/python/3.9/bin/python3 - - name: serverless-rhel8-py3.13 + - name: serverless-rhel8-python3.13 tasks: - name: serverless_task_group - display_name: Serverless RHEL8 py3.13 + display_name: Serverless RHEL8 Python3.13 run_on: - rhel87-small batchtime: 10080 @@ -1290,7 +1282,7 @@ buildvariants: PYTHON_BINARY: /opt/python/3.13/bin/python3 # Stable api tests - - name: stable-api-require-v1-rhel8-py3.9-auth + - name: stable-api-require-v1-rhel8-python3.9-auth tasks: - name: .standalone .5.0 .noauth .nossl .sync_async - name: .standalone .6.0 .noauth .nossl .sync_async @@ -1298,7 +1290,7 @@ buildvariants: - name: .standalone .8.0 .noauth .nossl .sync_async - name: .standalone .rapid .noauth .nossl .sync_async - name: .standalone .latest .noauth .nossl .sync_async - display_name: Stable API require v1 RHEL8 py3.9 Auth + display_name: Stable API require v1 RHEL8 Python3.9 Auth run_on: - rhel87-small expansions: @@ -1307,7 +1299,7 @@ buildvariants: MONGODB_API_VERSION: "1" PYTHON_BINARY: /opt/python/3.9/bin/python3 tags: [versionedApi_tag] - - name: stable-api-accept-v2-rhel8-py3.9-auth + - name: stable-api-accept-v2-rhel8-python3.9-auth tasks: - name: .standalone .5.0 .noauth .nossl .sync_async - name: .standalone .6.0 .noauth .nossl .sync_async @@ -1315,7 +1307,7 @@ buildvariants: - name: .standalone .8.0 .noauth .nossl .sync_async - name: .standalone .rapid .noauth .nossl .sync_async - name: .standalone .latest .noauth .nossl .sync_async - display_name: Stable API accept v2 RHEL8 py3.9 Auth + display_name: Stable API accept v2 RHEL8 Python3.9 Auth run_on: - rhel87-small expansions: @@ -1323,7 +1315,7 @@ buildvariants: ORCHESTRATION_FILE: versioned-api-testing.json PYTHON_BINARY: /opt/python/3.9/bin/python3 tags: [versionedApi_tag] - - name: stable-api-require-v1-rhel8-py3.13-auth + - name: stable-api-require-v1-rhel8-python3.13-auth tasks: - name: .standalone .5.0 .noauth .nossl .sync_async - name: .standalone .6.0 .noauth .nossl .sync_async @@ -1331,7 +1323,7 @@ buildvariants: - name: .standalone .8.0 .noauth .nossl .sync_async - name: .standalone .rapid .noauth .nossl .sync_async - name: .standalone .latest 
.noauth .nossl .sync_async - display_name: Stable API require v1 RHEL8 py3.13 Auth + display_name: Stable API require v1 RHEL8 Python3.13 Auth run_on: - rhel87-small expansions: @@ -1340,7 +1332,7 @@ buildvariants: MONGODB_API_VERSION: "1" PYTHON_BINARY: /opt/python/3.13/bin/python3 tags: [versionedApi_tag] - - name: stable-api-accept-v2-rhel8-py3.13-auth + - name: stable-api-accept-v2-rhel8-python3.13-auth tasks: - name: .standalone .5.0 .noauth .nossl .sync_async - name: .standalone .6.0 .noauth .nossl .sync_async @@ -1348,7 +1340,7 @@ buildvariants: - name: .standalone .8.0 .noauth .nossl .sync_async - name: .standalone .rapid .noauth .nossl .sync_async - name: .standalone .latest .noauth .nossl .sync_async - display_name: Stable API accept v2 RHEL8 py3.13 Auth + display_name: Stable API accept v2 RHEL8 Python3.13 Auth run_on: - rhel87-small expansions: @@ -1358,7 +1350,7 @@ buildvariants: tags: [versionedApi_tag] # Storage engine tests - - name: storage-inmemory-rhel8-py3.9 + - name: storage-inmemory-rhel8-python3.9 tasks: - name: .standalone .noauth .nossl .4.0 .sync_async - name: .standalone .noauth .nossl .4.4 .sync_async @@ -1368,17 +1360,17 @@ buildvariants: - name: .standalone .noauth .nossl .8.0 .sync_async - name: .standalone .noauth .nossl .rapid .sync_async - name: .standalone .noauth .nossl .latest .sync_async - display_name: Storage InMemory RHEL8 py3.9 + display_name: Storage InMemory RHEL8 Python3.9 run_on: - rhel87-small expansions: STORAGE_ENGINE: inmemory PYTHON_BINARY: /opt/python/3.9/bin/python3 - - name: storage-mmapv1-rhel8-py3.9 + - name: storage-mmapv1-rhel8-python3.9 tasks: - name: .standalone .4.0 .noauth .nossl .sync_async - name: .replica_set .4.0 .noauth .nossl .sync_async - display_name: Storage MMAPv1 RHEL8 py3.9 + display_name: Storage MMAPv1 RHEL8 Python3.9 run_on: - rhel87-small expansions: diff --git a/.evergreen/scripts/configure-env.sh b/.evergreen/scripts/configure-env.sh index 0c9c8bb03a..98d400037c 100644 --- a/.evergreen/scripts/configure-env.sh +++ b/.evergreen/scripts/configure-env.sh @@ -46,6 +46,11 @@ export PROJECT="$project" export PIP_QUIET=1 EOT +# Skip CSOT tests on non-linux platforms. +if [ "$(uname -s)" != "Linux" ]; then + echo "export SKIP_CSOT_TESTS=1" >> $SCRIPT_DIR/env.sh +fi + # Add these expansions to make it easier to call out tests scripts from the EVG yaml cat < expansion.yml DRIVERS_TOOLS: "$DRIVERS_TOOLS" diff --git a/.evergreen/scripts/generate_config.py b/.evergreen/scripts/generate_config.py index 05529ecb25..b7187b50db 100644 --- a/.evergreen/scripts/generate_config.py +++ b/.evergreen/scripts/generate_config.py @@ -57,17 +57,26 @@ class Host: name: str run_on: str display_name: str + variables: dict[str, str] | None # Hosts with toolchains. 
-HOSTS["rhel8"] = Host("rhel8", "rhel87-small", "RHEL8") -HOSTS["win64"] = Host("win64", "windows-64-vsMulti-small", "Win64") -HOSTS["win32"] = Host("win32", "windows-64-vsMulti-small", "Win32") -HOSTS["macos"] = Host("macos", "macos-14", "macOS") -HOSTS["macos-arm64"] = Host("macos-arm64", "macos-14-arm64", "macOS Arm64") -HOSTS["ubuntu20"] = Host("ubuntu20", "ubuntu2004-small", "Ubuntu-20") -HOSTS["ubuntu22"] = Host("ubuntu22", "ubuntu2204-small", "Ubuntu-22") -HOSTS["rhel7"] = Host("rhel7", "rhel79-small", "RHEL7") +HOSTS["rhel8"] = Host("rhel8", "rhel87-small", "RHEL8", dict()) +HOSTS["win64"] = Host("win64", "windows-64-vsMulti-small", "Win64", dict()) +HOSTS["win32"] = Host("win32", "windows-64-vsMulti-small", "Win32", dict()) +HOSTS["macos"] = Host("macos", "macos-14", "macOS", dict()) +HOSTS["macos-arm64"] = Host("macos-arm64", "macos-14-arm64", "macOS Arm64", dict()) +HOSTS["ubuntu20"] = Host("ubuntu20", "ubuntu2004-small", "Ubuntu-20", dict()) +HOSTS["ubuntu22"] = Host("ubuntu22", "ubuntu2204-small", "Ubuntu-22", dict()) +HOSTS["rhel7"] = Host("rhel7", "rhel79-small", "RHEL7", dict()) +DEFAULT_HOST = HOSTS["rhel8"] + +# Other hosts +OTHER_HOSTS = ["RHEL9-FIPS", "RHEL8-zseries", "RHEL8-POWER8", "RHEL8-arm64"] +for name, run_on in zip( + OTHER_HOSTS, ["rhel92-fips", "rhel8-zseries-small", "rhel8-power-small", "rhel82-arm64-small"] +): + HOSTS[name] = Host(name, run_on, name, dict()) ############## @@ -75,57 +84,77 @@ class Host: ############## -def create_variant( +def create_variant_generic( task_names: list[str], display_name: str, *, - python: str | None = None, - version: str | None = None, - host: str | None = None, + host: Host | None = None, + default_run_on="rhel87-small", + expansions: dict | None = None, **kwargs: Any, ) -> BuildVariant: """Create a build variant for the given inputs.""" task_refs = [EvgTaskRef(name=n) for n in task_names] - kwargs.setdefault("expansions", dict()) - expansions = kwargs.pop("expansions", dict()).copy() + expansions = expansions and expansions.copy() or dict() if "run_on" in kwargs: run_on = kwargs.pop("run_on") + elif host: + run_on = [host.run_on] + if host.variables: + expansions.update(host.variables) else: - host = host or "rhel8" - run_on = [HOSTS[host].run_on] + run_on = [default_run_on] + if isinstance(run_on, str): + run_on = [run_on] name = display_name.replace(" ", "-").replace("*-", "").lower() - if python: - expansions["PYTHON_BINARY"] = get_python_binary(python, host) - if version: - expansions["VERSION"] = version - expansions = expansions or None return BuildVariant( name=name, display_name=display_name, tasks=task_refs, - expansions=expansions, + expansions=expansions or None, run_on=run_on, **kwargs, ) -def get_python_binary(python: str, host: str) -> str: +def create_variant( + task_names: list[str], + display_name: str, + *, + version: str | None = None, + host: Host | None = None, + python: str | None = None, + expansions: dict | None = None, + **kwargs: Any, +) -> BuildVariant: + expansions = expansions and expansions.copy() or dict() + if version: + expansions["VERSION"] = version + if python: + expansions["PYTHON_BINARY"] = get_python_binary(python, host) + return create_variant_generic( + task_names, display_name, version=version, host=host, expansions=expansions, **kwargs + ) + + +def get_python_binary(python: str, host: Host) -> str: """Get the appropriate python binary given a python version and host.""" - if host in ["win64", "win32"]: - if host == "win32": + name = host.name + if name in ["win64", "win32"]: + 
if name == "win32": base = "C:/python/32" else: base = "C:/python" python = python.replace(".", "") return f"{base}/Python{python}/python.exe" - if host in ["rhel8", "ubuntu22", "ubuntu20", "rhel7"]: + if name in ["rhel8", "ubuntu22", "ubuntu20", "rhel7"]: return f"/opt/python/{python}/bin/python3" - if host in ["macos", "macos-arm64"]: + if name in ["macos", "macos-arm64"]: return f"/Library/Frameworks/Python.Framework/Versions/{python}/bin/python3" - raise ValueError(f"no match found for python {python} on {host}") + raise ValueError(f"no match found for python {python} on {name}") def get_versions_from(min_version: str) -> list[str]: @@ -146,11 +175,11 @@ def get_versions_until(max_version: str) -> list[str]: return versions -def get_display_name(base: str, host: str | None = None, **kwargs) -> str: +def get_display_name(base: str, host: Host | None = None, **kwargs) -> str: """Get the display name of a variant.""" display_name = base if host is not None: - display_name += f" {HOSTS[host].display_name}" + display_name += f" {host.display_name}" version = kwargs.pop("VERSION", None) version = version or kwargs.pop("version", None) if version: @@ -161,7 +190,9 @@ def get_display_name(base: str, host: str | None = None, **kwargs) -> str: name = value if key.lower() == "python": if not value.startswith("pypy"): - name = f"py{value}" + name = f"Python{value}" + else: + name = f"PyPy{value.replace('pypy', '')}" elif key.lower() in DISPLAY_LOOKUP: name = DISPLAY_LOOKUP[key.lower()][value] else: @@ -203,10 +234,10 @@ def create_ocsp_variants() -> list[BuildVariant]: expansions = dict(AUTH="noauth", SSL="ssl", TOPOLOGY="server") base_display = "OCSP" - # OCSP tests on rhel8 with all servers v4.4+ and all python versions. + # OCSP tests on default host with all servers v4.4+ and all python versions. versions = [v for v in ALL_VERSIONS if v != "4.0"] for version, python in zip_cycle(versions, ALL_PYTHONS): - host = "rhel8" + host = DEFAULT_HOST variant = create_variant( [".ocsp"], get_display_name(base_display, host, version=version, python=python), @@ -220,7 +251,8 @@ def create_ocsp_variants() -> list[BuildVariant]: # OCSP tests on Windows and MacOS. # MongoDB servers on these hosts do not staple OCSP responses and only support RSA. - for host, version in product(["win64", "macos"], ["4.4", "8.0"]): + for host_name, version in product(["win64", "macos"], ["4.4", "8.0"]): + host = HOSTS[host_name] python = CPYTHONS[0] if version == "4.4" else CPYTHONS[-1] variant = create_variant( [".ocsp-rsa !.ocsp-staple"], @@ -240,7 +272,7 @@ def create_server_variants() -> list[BuildVariant]: variants = [] # Run the full matrix on linux with min and max CPython, and latest pypy. - host = "rhel8" + host = DEFAULT_HOST # Prefix the display name with an asterisk so it is sorted first. base_display_name = "* Test" for python in [*MIN_MAX_PYTHON, PYPYS[-1]]: @@ -270,23 +302,17 @@ def create_server_variants() -> list[BuildVariant]: variants.append(variant) # Test a subset on each of the other platforms. 
- for host in ("macos", "macos-arm64", "win64", "win32"): + for host_name in ("macos", "macos-arm64", "win64", "win32"): for python in MIN_MAX_PYTHON: tasks = [f"{t} !.sync_async" for t in SUB_TASKS] # MacOS arm64 only works on server versions 6.0+ - if host == "macos-arm64": + if host_name == "macos-arm64": tasks = [] for version in get_versions_from("6.0"): tasks.extend(f"{t} .{version} !.sync_async" for t in SUB_TASKS) - expansions = dict(SKIP_CSOT_TESTS="true") - display_name = get_display_name(base_display_name, host, python=python, **expansions) - variant = create_variant( - tasks, - display_name, - python=python, - host=host, - expansions=expansions, - ) + host = HOSTS[host_name] + display_name = get_display_name(base_display_name, host, python=python) + variant = create_variant(tasks, display_name, python=python, host=host) variants.append(variant) return variants @@ -305,7 +331,7 @@ def get_encryption_expansions(encryption): expansions["test_encryption_pyopenssl"] = "true" return expansions - host = "rhel8" + host = DEFAULT_HOST # Test against all server versions for the three main python versions. encryptions = ["Encryption", "Encryption crypt_shared", "Encryption PyOpenSSL"] @@ -339,7 +365,8 @@ def get_encryption_expansions(encryption): # Test on macos and linux on one server version and topology for min and max python. encryptions = ["Encryption", "Encryption crypt_shared"] task_names = [".latest .replica_set .sync_async"] - for host, encryption, python in product(["macos", "win64"], encryptions, MIN_MAX_PYTHON): + for host_name, encryption, python in product(["macos", "win64"], encryptions, MIN_MAX_PYTHON): + host = HOSTS[host_name] expansions = get_encryption_expansions(encryption) display_name = get_display_name(encryption, host, python=python, **expansions) variant = create_variant( @@ -357,7 +384,7 @@ def get_encryption_expansions(encryption): def create_load_balancer_variants(): # Load balancer tests - run all supported server versions using the lowest supported python. - host = "rhel8" + host = DEFAULT_HOST batchtime = BATCHTIME_WEEK versions = get_versions_from("6.0") variants = [] @@ -379,7 +406,7 @@ def create_load_balancer_variants(): def create_compression_variants(): # Compression tests - standalone versions of each server, across python versions, with and without c extensions. # PyPy interpreters are always tested without extensions. - host = "rhel8" + host = DEFAULT_HOST base_task = ".standalone .noauth .nossl .sync_async" task_names = dict(snappy=[base_task], zlib=[base_task], zstd=[f"{base_task} !.4.0"]) variants = [] @@ -423,11 +450,11 @@ def create_enterprise_auth_variants(): # All python versions across platforms. 
for python in ALL_PYTHONS: if python == CPYTHONS[0]: - host = "macos" + host = HOSTS["macos"] elif python == CPYTHONS[-1]: - host = "win64" + host = HOSTS["win64"] else: - host = "rhel8" + host = DEFAULT_HOST display_name = get_display_name("Auth Enterprise", host, python=python, **expansions) variant = create_variant( ["test-enterprise-auth"], display_name, host=host, python=python, expansions=expansions @@ -448,11 +475,11 @@ def create_pyopenssl_variants(): auth = "noauth" if python == CPYTHONS[0] else "auth" ssl = "nossl" if auth == "noauth" else "ssl" if python == CPYTHONS[0]: - host = "macos" + host = HOSTS["macos"] elif python == CPYTHONS[-1]: - host = "win64" + host = HOSTS["win64"] else: - host = "rhel8" + host = DEFAULT_HOST display_name = get_display_name(base_name, host, python=python) variant = create_variant( @@ -469,7 +496,7 @@ def create_pyopenssl_variants(): def create_storage_engine_variants(): - host = "rhel8" + host = DEFAULT_HOST engines = ["InMemory", "MMAPv1"] variants = [] for engine in engines: @@ -492,7 +519,7 @@ def create_storage_engine_variants(): def create_stable_api_variants(): - host = "rhel8" + host = DEFAULT_HOST tags = ["versionedApi_tag"] tasks = [f".standalone .{v} .noauth .nossl .sync_async" for v in get_versions_from("5.0")] variants = [] @@ -526,7 +553,7 @@ def create_stable_api_variants(): def create_green_framework_variants(): variants = [] tasks = [".standalone .noauth .nossl .sync_async"] - host = "rhel8" + host = DEFAULT_HOST for python, framework in product([CPYTHONS[0], CPYTHONS[-2]], ["eventlet", "gevent"]): expansions = dict(GREEN_FRAMEWORK=framework, AUTH="auth", SSL="ssl") display_name = get_display_name(f"Green {framework.capitalize()}", host, python=python) @@ -539,7 +566,7 @@ def create_green_framework_variants(): def create_no_c_ext_variants(): variants = [] - host = "rhel8" + host = DEFAULT_HOST for python, topology in zip_cycle(CPYTHONS, TOPOLOGIES): tasks = [f".{topology} .noauth .nossl .sync_async"] expansions = dict() @@ -554,7 +581,7 @@ def create_no_c_ext_variants(): def create_atlas_data_lake_variants(): variants = [] - host = "ubuntu22" + host = HOSTS["ubuntu22"] for python, c_ext in product(MIN_MAX_PYTHON, C_EXTS): tasks = ["atlas-data-lake-tests"] expansions = dict(AUTH="auth") @@ -569,7 +596,7 @@ def create_atlas_data_lake_variants(): def create_mod_wsgi_variants(): variants = [] - host = "ubuntu22" + host = HOSTS["ubuntu22"] tasks = [ "mod-wsgi-standalone", "mod-wsgi-replica-set", @@ -587,7 +614,7 @@ def create_mod_wsgi_variants(): def create_disable_test_commands_variants(): - host = "rhel8" + host = DEFAULT_HOST expansions = dict(AUTH="auth", SSL="ssl", DISABLE_TEST_COMMANDS="1") python = CPYTHONS[0] display_name = get_display_name("Disable test commands", host, python=python) @@ -596,7 +623,7 @@ def create_disable_test_commands_variants(): def create_serverless_variants(): - host = "rhel8" + host = DEFAULT_HOST batchtime = BATCHTIME_WEEK expansions = dict(test_serverless="true", AUTH="auth", SSL="ssl") tasks = ["serverless_task_group"] @@ -617,10 +644,11 @@ def create_serverless_variants(): def create_oidc_auth_variants(): variants = [] other_tasks = ["testazureoidc_task_group", "testgcpoidc_task_group", "testk8soidc_task_group"] - for host in ["ubuntu22", "macos", "win64"]: + for host_name in ["ubuntu22", "macos", "win64"]: tasks = ["testoidc_task_group"] - if host == "ubuntu22": + if host_name == "ubuntu22": tasks += other_tasks + host = HOSTS[host_name] variants.append( create_variant( tasks, @@ -633,7 +661,7 @@ def 
create_oidc_auth_variants(): def create_search_index_variants(): - host = "rhel8" + host = DEFAULT_HOST python = CPYTHONS[0] return [ create_variant( @@ -646,7 +674,7 @@ def create_search_index_variants(): def create_mockupdb_variants(): - host = "rhel8" + host = DEFAULT_HOST python = CPYTHONS[0] return [ create_variant( @@ -659,7 +687,7 @@ def create_mockupdb_variants(): def create_doctests_variants(): - host = "rhel8" + host = DEFAULT_HOST python = CPYTHONS[0] return [ create_variant( @@ -672,7 +700,7 @@ def create_doctests_variants(): def create_atlas_connect_variants(): - host = "rhel8" + host = DEFAULT_HOST return [ create_variant( ["atlas-connect"], @@ -696,13 +724,14 @@ def create_aws_auth_variants(): "aws-auth-test-latest", ] - for host, python in product(["ubuntu20", "win64", "macos"], MIN_MAX_PYTHON): + for host_name, python in product(["ubuntu20", "win64", "macos"], MIN_MAX_PYTHON): expansions = dict() - if host != "ubuntu20": + if host_name != "ubuntu20": expansions["skip_ECS_auth_test"] = "true" - if host == "macos": + if host_name == "macos": expansions["skip_EC2_auth_test"] = "true" expansions["skip_web_identity_auth_test"] = "true" + host = HOSTS[host_name] variant = create_variant( tasks, get_display_name("Auth AWS", host, python=python), @@ -719,11 +748,11 @@ def create_alternative_hosts_variants(): batchtime = BATCHTIME_WEEK variants = [] - host = "rhel7" + host = HOSTS["rhel7"] variants.append( create_variant( [".5.0 .standalone !.sync_async"], - get_display_name("OpenSSL 1.0.2", "rhel7", python=CPYTHONS[0], **expansions), + get_display_name("OpenSSL 1.0.2", host, python=CPYTHONS[0], **expansions), host=host, python=CPYTHONS[0], batchtime=batchtime, @@ -731,16 +760,15 @@ def create_alternative_hosts_variants(): ) ) - hosts = ["rhel92-fips", "rhel8-zseries-small", "rhel8-power-small", "rhel82-arm64-small"] - host_names = ["RHEL9-FIPS", "RHEL8-zseries", "RHEL8-POWER8", "RHEL8-arm64"] - for host, host_name in zip(hosts, host_names): + for host_name in OTHER_HOSTS: + host = HOSTS[host_name] variants.append( create_variant( [".6.0 .standalone !.sync_async"], - display_name=get_display_name(f"Other hosts {host_name}", **expansions), + display_name=get_display_name("Other hosts", host, **expansions), expansions=expansions, batchtime=batchtime, - run_on=[host], + host=host, ) ) return variants From 5e5528238ca2118e7dd3737ee144f12bc8187f34 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Mon, 11 Nov 2024 13:24:22 -0600 Subject: [PATCH 099/182] PYTHON-4817 Revert import guard on asyncio (#1894) --- pymongo/__init__.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/pymongo/__init__.py b/pymongo/__init__.py index 6416f939e8..58f6ff338b 100644 --- a/pymongo/__init__.py +++ b/pymongo/__init__.py @@ -88,6 +88,7 @@ from pymongo import _csot from pymongo._version import __version__, get_version_string, version_tuple +from pymongo.asynchronous.mongo_client import AsyncMongoClient from pymongo.common import MAX_SUPPORTED_WIRE_VERSION, MIN_SUPPORTED_WIRE_VERSION, has_c from pymongo.cursor import CursorType from pymongo.operations import ( @@ -104,14 +105,6 @@ from pymongo.synchronous.mongo_client import MongoClient from pymongo.write_concern import WriteConcern -try: - from pymongo.asynchronous.mongo_client import AsyncMongoClient -except Exception as e: - # PYTHON-4781: Importing asyncio can fail on Windows. 
- import warnings as _warnings - - _warnings.warn(f"Failed to import Async PyMongo: {e!r}", ImportWarning, stacklevel=2) - version = __version__ """Current version of PyMongo.""" From 63c3f8aedec0ebac01f46929f83d69f1e2b6dfcd Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Mon, 11 Nov 2024 13:25:42 -0600 Subject: [PATCH 100/182] PYTHON-4959 Adopt zizmor GitHub Actions security scanner (#2001) --- .github/workflows/codeql.yml | 1 + .github/workflows/dist.yml | 2 ++ .github/workflows/test-python.yml | 14 ++++++++++++++ .github/workflows/zizmor.yml | 32 +++++++++++++++++++++++++++++++ 4 files changed, 49 insertions(+) create mode 100644 .github/workflows/zizmor.yml diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 2dc070d7c6..e620cb1801 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -39,6 +39,7 @@ jobs: uses: actions/checkout@v4 with: ref: ${{ inputs.ref }} + persist-credentials: false - uses: actions/setup-python@v5 # Initializes the CodeQL tools for scanning. diff --git a/.github/workflows/dist.yml b/.github/workflows/dist.yml index 858d269e08..a4c5a8279b 100644 --- a/.github/workflows/dist.yml +++ b/.github/workflows/dist.yml @@ -48,6 +48,7 @@ jobs: uses: actions/checkout@v4 with: fetch-depth: 0 + persist-credentials: false ref: ${{ inputs.ref }} - uses: actions/setup-python@v5 @@ -106,6 +107,7 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 + persist-credentials: false ref: ${{ inputs.ref }} - uses: actions/setup-python@v5 diff --git a/.github/workflows/test-python.yml b/.github/workflows/test-python.yml index 40991440d3..12cfaa4b27 100644 --- a/.github/workflows/test-python.yml +++ b/.github/workflows/test-python.yml @@ -20,6 +20,8 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 + with: + persist-credentials: false - uses: actions/setup-python@v5 with: python-version: "3.9" @@ -55,6 +57,8 @@ jobs: name: CPython ${{ matrix.python-version }}-${{ matrix.os }} steps: - uses: actions/checkout@v4 + with: + persist-credentials: false - if: ${{ matrix.python-version == '3.13t' }} name: Setup free-threaded Python uses: deadsnakes/action@v3.2.0 @@ -99,6 +103,8 @@ jobs: name: DocTest steps: - uses: actions/checkout@v4 + with: + persist-credentials: false - name: Setup Python uses: actions/setup-python@v5 with: @@ -121,6 +127,8 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 + with: + persist-credentials: false - uses: actions/setup-python@v5 with: cache: 'pip' @@ -139,6 +147,8 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 + with: + persist-credentials: false - uses: actions/setup-python@v5 with: cache: 'pip' @@ -160,6 +170,8 @@ jobs: python: ["3.9", "3.11"] steps: - uses: actions/checkout@v4 + with: + persist-credentials: false - uses: actions/setup-python@v5 with: python-version: "${{matrix.python}}" @@ -177,6 +189,8 @@ jobs: name: "Make an sdist" steps: - uses: actions/checkout@v4 + with: + persist-credentials: false - uses: actions/setup-python@v5 with: cache: 'pip' diff --git a/.github/workflows/zizmor.yml b/.github/workflows/zizmor.yml new file mode 100644 index 0000000000..31afeb6655 --- /dev/null +++ b/.github/workflows/zizmor.yml @@ -0,0 +1,32 @@ +name: GitHub Actions Security Analysis with zizmor 🌈 + +on: + push: + branches: ["master"] + pull_request: + branches: ["**"] + +jobs: + zizmor: + name: zizmor latest via Cargo + runs-on: ubuntu-latest + permissions: + security-events: write + steps: + - name: Checkout repository + uses: actions/checkout@v4 + 
with: + persist-credentials: false + - name: Setup Rust + uses: actions-rust-lang/setup-rust-toolchain@v1 + - name: Get zizmor + run: cargo install zizmor + - name: Run zizmor 🌈 + run: zizmor --format sarif . > results.sarif + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Upload SARIF file + uses: github/codeql-action/upload-sarif@v3 + with: + sarif_file: results.sarif + category: zizmor From 72a51092cd84297c495fb13049a13abafb704bb2 Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Tue, 12 Nov 2024 09:32:41 -0500 Subject: [PATCH 101/182] PYTHON-4915 - Add guidance on adding _id fields to documents to CRUD spec, reorder client.bulk_write generated _id fields (#1976) --- pymongo/message.py | 13 ++- test/asynchronous/test_client_bulk_write.py | 14 +++ test/mockupdb/test_id_ordering.py | 94 +++++++++++++++++++++ test/test_client_bulk_write.py | 14 +++ 4 files changed, 134 insertions(+), 1 deletion(-) create mode 100644 test/mockupdb/test_id_ordering.py diff --git a/pymongo/message.py b/pymongo/message.py index de77ccd382..3e2ae00ae7 100644 --- a/pymongo/message.py +++ b/pymongo/message.py @@ -24,6 +24,7 @@ import datetime import random import struct +from collections import ChainMap from io import BytesIO as _BytesIO from typing import ( TYPE_CHECKING, @@ -1111,8 +1112,18 @@ def _check_doc_size_limits( # key and the index of its namespace within ns_info as its value. op_doc[op_type] = ns_info[namespace] # type: ignore[index] + # Since the data document itself is nested within the insert document + # it won't be automatically re-ordered by the BSON conversion. + # We use ChainMap here to make the _id field the first field instead. + doc_to_encode = op_doc + if real_op_type == "insert": + doc = op_doc["document"] + if not isinstance(doc, RawBSONDocument): + doc_to_encode = op_doc.copy() # type: ignore[attr-defined] # Shallow copy + doc_to_encode["document"] = ChainMap(doc, {"_id": doc["_id"]}) # type: ignore[index] + # Encode current operation doc and, if newly added, namespace doc. 
- op_doc_encoded = _dict_to_bson(op_doc, False, opts) + op_doc_encoded = _dict_to_bson(doc_to_encode, False, opts) op_length = len(op_doc_encoded) if ns_doc: ns_doc_encoded = _dict_to_bson(ns_doc, False, opts) diff --git a/test/asynchronous/test_client_bulk_write.py b/test/asynchronous/test_client_bulk_write.py index 5f6b3353e8..01294402de 100644 --- a/test/asynchronous/test_client_bulk_write.py +++ b/test/asynchronous/test_client_bulk_write.py @@ -18,6 +18,9 @@ import os import sys +from bson import encode +from bson.raw_bson import RawBSONDocument + sys.path[0:0] = [""] from test.asynchronous import ( @@ -84,6 +87,17 @@ async def test_formats_write_error_correctly(self): self.assertEqual(write_error["idx"], 1) self.assertEqual(write_error["op"], {"insert": 0, "document": {"_id": 1}}) + @async_client_context.require_version_min(8, 0, 0, -24) + @async_client_context.require_no_serverless + async def test_raw_bson_not_inflated(self): + doc = RawBSONDocument(encode({"a": "b" * 100})) + models = [ + InsertOne(namespace="db.coll", document=doc), + ] + await self.client.bulk_write(models=models) + + self.assertIsNone(doc._RawBSONDocument__inflated_doc) + # https://github.com/mongodb/specifications/tree/master/source/crud/tests class TestClientBulkWriteCRUD(AsyncIntegrationTest): diff --git a/test/mockupdb/test_id_ordering.py b/test/mockupdb/test_id_ordering.py new file mode 100644 index 0000000000..7e2c91d592 --- /dev/null +++ b/test/mockupdb/test_id_ordering.py @@ -0,0 +1,94 @@ +# Copyright 2024-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import annotations + +from test import PyMongoTestCase + +import pytest + +from pymongo import InsertOne + +try: + from mockupdb import MockupDB, OpMsg, go, going + + _HAVE_MOCKUPDB = True +except ImportError: + _HAVE_MOCKUPDB = False + + +from bson.objectid import ObjectId + +pytestmark = pytest.mark.mockupdb + + +# https://github.com/mongodb/specifications/blob/master/source/crud/tests/README.md#16-generated-document-identifiers-are-the-first-field-in-their-document +class TestIdOrdering(PyMongoTestCase): + def test_16_generated_document_ids_are_first_field(self): + server = MockupDB() + server.autoresponds( + "hello", + isWritablePrimary=True, + msg="isdbgrid", + minWireVersion=0, + maxWireVersion=25, + helloOk=True, + serviceId=ObjectId(), + ) + server.run() + self.addCleanup(server.stop) + + # We also verify that the original document contains an _id field after each insert + document = {"x": 1} + + client = self.simple_client(server.uri, loadBalanced=True) + collection = client.db.coll + with going(collection.insert_one, document): + request = server.receives() + self.assertEqual("_id", next(iter(request["documents"][0]))) + request.reply({"ok": 1}) + self.assertIn("_id", document) + + document = {"x1": 1} + + with going(collection.bulk_write, [InsertOne(document)]): + request = server.receives() + self.assertEqual("_id", next(iter(request["documents"][0]))) + request.reply({"ok": 1}) + self.assertIn("_id", document) + + document = {"x2": 1} + with going(client.bulk_write, [InsertOne(namespace="db.coll", document=document)]): + request = server.receives() + self.assertEqual("_id", next(iter(request["ops"][0]["document"]))) + request.reply({"ok": 1}) + self.assertIn("_id", document) + + # Re-ordering user-supplied _id fields is not required by the spec, but PyMongo does it for performance reasons + with going(collection.insert_one, {"x": 1, "_id": 111}): + request = server.receives() + self.assertEqual("_id", next(iter(request["documents"][0]))) + request.reply({"ok": 1}) + + with going(collection.bulk_write, [InsertOne({"x1": 1, "_id": 1111})]): + request = server.receives() + self.assertEqual("_id", next(iter(request["documents"][0]))) + request.reply({"ok": 1}) + + with going( + client.bulk_write, [InsertOne(namespace="db.coll", document={"x2": 1, "_id": 11111})] + ): + request = server.receives() + self.assertEqual("_id", next(iter(request["ops"][0]["document"]))) + request.reply({"ok": 1}) diff --git a/test/test_client_bulk_write.py b/test/test_client_bulk_write.py index 733970dd57..f06c07d588 100644 --- a/test/test_client_bulk_write.py +++ b/test/test_client_bulk_write.py @@ -18,6 +18,9 @@ import os import sys +from bson import encode +from bson.raw_bson import RawBSONDocument + sys.path[0:0] = [""] from test import ( @@ -84,6 +87,17 @@ def test_formats_write_error_correctly(self): self.assertEqual(write_error["idx"], 1) self.assertEqual(write_error["op"], {"insert": 0, "document": {"_id": 1}}) + @client_context.require_version_min(8, 0, 0, -24) + @client_context.require_no_serverless + def test_raw_bson_not_inflated(self): + doc = RawBSONDocument(encode({"a": "b" * 100})) + models = [ + InsertOne(namespace="db.coll", document=doc), + ] + self.client.bulk_write(models=models) + + self.assertIsNone(doc._RawBSONDocument__inflated_doc) + # https://github.com/mongodb/specifications/tree/master/source/crud/tests class TestClientBulkWriteCRUD(IntegrationTest): From 35b2fbbd020f91f180eaf7ca5335c5bd1fb6d1bf Mon Sep 17 00:00:00 2001 From: Shane Harvey Date: Fri, 15 Nov 
2024 08:57:34 -0800 Subject: [PATCH 102/182] PYTHON-4977 Fix import time on Windows again (#2003) --- pymongo/pool_options.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/pymongo/pool_options.py b/pymongo/pool_options.py index f3ed6cd2c1..038dbb3b5d 100644 --- a/pymongo/pool_options.py +++ b/pymongo/pool_options.py @@ -70,13 +70,14 @@ "version": platform.mac_ver()[0], } elif sys.platform == "win32": + _ver = sys.getwindowsversion() _METADATA["os"] = { - "type": platform.system(), - # "Windows XP", "Windows 7", "Windows 10", etc. - "name": " ".join((platform.system(), platform.release())), - "architecture": platform.machine(), - # Windows patch level (e.g. 5.1.2600-SP3) - "version": "-".join(platform.win32_ver()[1:3]), + "type": "Windows", + "name": "Windows", + # Avoid using platform calls, see PYTHON-4455. + "architecture": os.environ.get("PROCESSOR_ARCHITECTURE") or platform.machine(), + # Windows patch level (e.g. 10.0.17763-SP0). + "version": ".".join(map(str, _ver[:3])) + f"-SP{_ver[-1] or '0'}", } elif sys.platform.startswith("java"): _name, _ver, _arch = platform.java_ver()[-1] From d2c1e18cc26672004299e75c9a23475cabdb6834 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Nov 2024 05:41:04 -0600 Subject: [PATCH 103/182] Bump pyright from 1.1.388 to 1.1.389 (#2007) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/typing.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/typing.txt b/requirements/typing.txt index ad799ea368..613eba7645 100644 --- a/requirements/typing.txt +++ b/requirements/typing.txt @@ -1,5 +1,5 @@ mypy==1.13.0 -pyright==1.1.388 +pyright==1.1.389 typing_extensions -r ./encryption.txt -r ./ocsp.txt From 18940030f17a8a887c653ef3bbf5aa71e52c86aa Mon Sep 17 00:00:00 2001 From: Shane Harvey Date: Mon, 18 Nov 2024 10:25:20 -0800 Subject: [PATCH 104/182] PYTHON-4921 Eliminate unnecessary killCursors command when batchSize == limit (#2004) --- pymongo/message.py | 4 + .../client-bulkWrite-replaceOne-sort.json | 3 +- .../client-bulkWrite-updateOne-sort.json | 3 +- test/crud/unified/distinct-hint.json | 139 +++++++++++++++ test/crud/unified/estimatedDocumentCount.json | 2 +- test/crud/unified/find.json | 62 +++++++ test/crud/unified/findOne.json | 158 ++++++++++++++++++ test/utils.py | 29 ---- 8 files changed, 368 insertions(+), 32 deletions(-) create mode 100644 test/crud/unified/distinct-hint.json create mode 100644 test/crud/unified/findOne.json diff --git a/pymongo/message.py b/pymongo/message.py index 3e2ae00ae7..b6c00f06cb 100644 --- a/pymongo/message.py +++ b/pymongo/message.py @@ -252,6 +252,10 @@ def _gen_find_command( if limit < 0: cmd["singleBatch"] = True if batch_size: + # When limit and batchSize are equal we increase batchSize by 1 to + # avoid an unnecessary killCursors. 
+ if limit == batch_size: + batch_size += 1 cmd["batchSize"] = batch_size if read_concern.level and not (session and session.in_transaction): cmd["readConcern"] = read_concern.document diff --git a/test/crud/unified/client-bulkWrite-replaceOne-sort.json b/test/crud/unified/client-bulkWrite-replaceOne-sort.json index 53218c1f48..b86bc5f942 100644 --- a/test/crud/unified/client-bulkWrite-replaceOne-sort.json +++ b/test/crud/unified/client-bulkWrite-replaceOne-sort.json @@ -3,7 +3,8 @@ "schemaVersion": "1.4", "runOnRequirements": [ { - "minServerVersion": "8.0" + "minServerVersion": "8.0", + "serverless": "forbid" } ], "createEntities": [ diff --git a/test/crud/unified/client-bulkWrite-updateOne-sort.json b/test/crud/unified/client-bulkWrite-updateOne-sort.json index 4a07b8b97c..ef75dcb374 100644 --- a/test/crud/unified/client-bulkWrite-updateOne-sort.json +++ b/test/crud/unified/client-bulkWrite-updateOne-sort.json @@ -3,7 +3,8 @@ "schemaVersion": "1.4", "runOnRequirements": [ { - "minServerVersion": "8.0" + "minServerVersion": "8.0", + "serverless": "forbid" } ], "createEntities": [ diff --git a/test/crud/unified/distinct-hint.json b/test/crud/unified/distinct-hint.json new file mode 100644 index 0000000000..2a6869cbe0 --- /dev/null +++ b/test/crud/unified/distinct-hint.json @@ -0,0 +1,139 @@ +{ + "description": "distinct-hint", + "schemaVersion": "1.0", + "runOnRequirements": [ + { + "minServerVersion": "7.1.0" + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "distinct-hint-tests" + } + }, + { + "collection": { + "id": "collection0", + "database": "database0", + "collectionName": "coll0" + } + } + ], + "initialData": [ + { + "collectionName": "coll0", + "databaseName": "distinct-hint-tests", + "documents": [ + { + "_id": 1, + "x": 11 + }, + { + "_id": 2, + "x": 22 + }, + { + "_id": 3, + "x": 33 + } + ] + } + ], + "tests": [ + { + "description": "distinct with hint string", + "operations": [ + { + "name": "distinct", + "object": "collection0", + "arguments": { + "fieldName": "x", + "filter": { + "_id": 1 + }, + "hint": "_id_" + }, + "expectResult": [ + 11 + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "distinct": "coll0", + "key": "x", + "query": { + "_id": 1 + }, + "hint": "_id_" + }, + "commandName": "distinct", + "databaseName": "distinct-hint-tests" + } + } + ] + } + ] + }, + { + "description": "distinct with hint document", + "operations": [ + { + "name": "distinct", + "object": "collection0", + "arguments": { + "fieldName": "x", + "filter": { + "_id": 1 + }, + "hint": { + "_id": 1 + } + }, + "expectResult": [ + 11 + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "distinct": "coll0", + "key": "x", + "query": { + "_id": 1 + }, + "hint": { + "_id": 1 + } + }, + "commandName": "distinct", + "databaseName": "distinct-hint-tests" + } + } + ] + } + ] + } + ] +} diff --git a/test/crud/unified/estimatedDocumentCount.json b/test/crud/unified/estimatedDocumentCount.json index 1b650c1cb6..3577d9006b 100644 --- a/test/crud/unified/estimatedDocumentCount.json +++ b/test/crud/unified/estimatedDocumentCount.json @@ -249,7 +249,7 @@ "name": "estimatedDocumentCount", "object": "collection0", "expectError": { - "isError": true + "isClientError": true } } ], diff --git a/test/crud/unified/find.json 
b/test/crud/unified/find.json index 6bf1e4e445..325cd96c21 100644 --- a/test/crud/unified/find.json +++ b/test/crud/unified/find.json @@ -237,6 +237,68 @@ ] } ] + }, + { + "description": "Find with batchSize equal to limit", + "operations": [ + { + "object": "collection0", + "name": "find", + "arguments": { + "filter": { + "_id": { + "$gt": 1 + } + }, + "sort": { + "_id": 1 + }, + "limit": 4, + "batchSize": 4 + }, + "expectResult": [ + { + "_id": 2, + "x": 22 + }, + { + "_id": 3, + "x": 33 + }, + { + "_id": 4, + "x": 44 + }, + { + "_id": 5, + "x": 55 + } + ] + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "find": "coll0", + "filter": { + "_id": { + "$gt": 1 + } + }, + "limit": 4, + "batchSize": 5 + }, + "commandName": "find", + "databaseName": "find-tests" + } + } + ] + } + ] } ] } diff --git a/test/crud/unified/findOne.json b/test/crud/unified/findOne.json new file mode 100644 index 0000000000..826c0f5dfd --- /dev/null +++ b/test/crud/unified/findOne.json @@ -0,0 +1,158 @@ +{ + "description": "findOne", + "schemaVersion": "1.0", + "createEntities": [ + { + "client": { + "id": "client0", + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "find-tests" + } + }, + { + "collection": { + "id": "collection0", + "database": "database0", + "collectionName": "coll0" + } + } + ], + "initialData": [ + { + "collectionName": "coll0", + "databaseName": "find-tests", + "documents": [ + { + "_id": 1, + "x": 11 + }, + { + "_id": 2, + "x": 22 + }, + { + "_id": 3, + "x": 33 + }, + { + "_id": 4, + "x": 44 + }, + { + "_id": 5, + "x": 55 + }, + { + "_id": 6, + "x": 66 + } + ] + } + ], + "tests": [ + { + "description": "FindOne with filter", + "operations": [ + { + "object": "collection0", + "name": "findOne", + "arguments": { + "filter": { + "_id": 1 + } + }, + "expectResult": { + "_id": 1, + "x": 11 + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "find": "coll0", + "filter": { + "_id": 1 + }, + "batchSize": { + "$$exists": false + }, + "limit": 1, + "singleBatch": true + }, + "commandName": "find", + "databaseName": "find-tests" + } + } + ] + } + ] + }, + { + "description": "FindOne with filter, sort, and skip", + "operations": [ + { + "object": "collection0", + "name": "findOne", + "arguments": { + "filter": { + "_id": { + "$gt": 2 + } + }, + "sort": { + "_id": 1 + }, + "skip": 2 + }, + "expectResult": { + "_id": 5, + "x": 55 + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "command": { + "find": "coll0", + "filter": { + "_id": { + "$gt": 2 + } + }, + "sort": { + "_id": 1 + }, + "skip": 2, + "batchSize": { + "$$exists": false + }, + "limit": 1, + "singleBatch": true + }, + "commandName": "find", + "databaseName": "find-tests" + } + } + ] + } + ] + } + ] +} diff --git a/test/utils.py b/test/utils.py index 766f209de2..9b326e5d73 100644 --- a/test/utils.py +++ b/test/utils.py @@ -925,35 +925,6 @@ def parse_spec_options(opts): if "maxCommitTimeMS" in opts: opts["max_commit_time_ms"] = opts.pop("maxCommitTimeMS") - if "hint" in opts: - hint = opts.pop("hint") - if not isinstance(hint, str): - hint = list(hint.items()) - opts["hint"] = hint - - # Properly format 'hint' arguments for the Bulk API tests. 
- if "requests" in opts: - reqs = opts.pop("requests") - for req in reqs: - if "name" in req: - # CRUD v2 format - args = req.pop("arguments", {}) - if "hint" in args: - hint = args.pop("hint") - if not isinstance(hint, str): - hint = list(hint.items()) - args["hint"] = hint - req["arguments"] = args - else: - # Unified test format - bulk_model, spec = next(iter(req.items())) - if "hint" in spec: - hint = spec.pop("hint") - if not isinstance(hint, str): - hint = list(hint.items()) - spec["hint"] = hint - opts["requests"] = reqs - return dict(opts) From c9d9d7c2dc6e8077ea048261e46e6a8264da4ef0 Mon Sep 17 00:00:00 2001 From: Shane Harvey Date: Mon, 18 Nov 2024 10:25:46 -0800 Subject: [PATCH 105/182] PYTHON-4907 Avoid noisy TypeError at interpreter exit (#2005) --- pymongo/asynchronous/mongo_client.py | 3 ++- pymongo/synchronous/mongo_client.py | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/pymongo/asynchronous/mongo_client.py b/pymongo/asynchronous/mongo_client.py index e4fdf25c28..3e4dc482d7 100644 --- a/pymongo/asynchronous/mongo_client.py +++ b/pymongo/asynchronous/mongo_client.py @@ -1195,7 +1195,8 @@ def __del__(self) -> None: ResourceWarning, stacklevel=2, ) - except AttributeError: + except (AttributeError, TypeError): + # Ignore errors at interpreter exit. pass def _close_cursor_soon( diff --git a/pymongo/synchronous/mongo_client.py b/pymongo/synchronous/mongo_client.py index 0380d4468b..00c6203a94 100644 --- a/pymongo/synchronous/mongo_client.py +++ b/pymongo/synchronous/mongo_client.py @@ -1193,7 +1193,8 @@ def __del__(self) -> None: ResourceWarning, stacklevel=2, ) - except AttributeError: + except (AttributeError, TypeError): + # Ignore errors at interpreter exit. pass def _close_cursor_soon( From 1dd42173e1101e1d1f8c41758337d617bb737d41 Mon Sep 17 00:00:00 2001 From: Shane Harvey Date: Mon, 18 Nov 2024 10:26:22 -0800 Subject: [PATCH 106/182] PYTHON-4919 Resync tests for retryable writes (#2006) --- test/asynchronous/test_retryable_writes.py | 44 +---- .../unified/aggregate-out-merge.json | 144 ++++++++++++++++ test/retryable_writes/unified/bulkWrite.json | 154 +++++++++++++++++- .../client-bulkWrite-serverErrors.json | 15 +- test/retryable_writes/unified/deleteMany.json | 22 ++- test/retryable_writes/unified/deleteOne.json | 32 +++- .../unified/findOneAndDelete.json | 32 +++- .../unified/findOneAndReplace.json | 32 +++- .../unified/findOneAndUpdate.json | 32 +++- test/retryable_writes/unified/insertMany.json | 59 ++++++- test/retryable_writes/unified/insertOne.json | 32 +++- test/retryable_writes/unified/replaceOne.json | 32 +++- .../unified/unacknowledged-write-concern.json | 77 +++++++++ test/retryable_writes/unified/updateMany.json | 22 ++- test/retryable_writes/unified/updateOne.json | 32 +++- test/test_retryable_writes.py | 44 +---- 16 files changed, 705 insertions(+), 100 deletions(-) create mode 100644 test/retryable_writes/unified/aggregate-out-merge.json create mode 100644 test/retryable_writes/unified/unacknowledged-write-concern.json diff --git a/test/asynchronous/test_retryable_writes.py b/test/asynchronous/test_retryable_writes.py index accbbd003f..ca2f0a5422 100644 --- a/test/asynchronous/test_retryable_writes.py +++ b/test/asynchronous/test_retryable_writes.py @@ -1,4 +1,4 @@ -# Copyright 2017 MongoDB, Inc. +# Copyright 2017-present MongoDB, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -43,7 +43,6 @@ from bson.int64 import Int64 from bson.raw_bson import RawBSONDocument from bson.son import SON -from pymongo.asynchronous.mongo_client import AsyncMongoClient from pymongo.errors import ( AutoReconnect, ConnectionFailure, @@ -226,47 +225,6 @@ async def test_supported_single_statement_no_retry(self): f"{msg} sent txnNumber with {event.command_name}", ) - @async_client_context.require_no_standalone - async def test_supported_single_statement_supported_cluster(self): - for method, args, kwargs in retryable_single_statement_ops(self.db.retryable_write_test): - msg = f"{method.__name__}(*{args!r}, **{kwargs!r})" - self.listener.reset() - await method(*args, **kwargs) - commands_started = self.listener.started_events - self.assertEqual(len(self.listener.succeeded_events), 1, msg) - first_attempt = commands_started[0] - self.assertIn( - "lsid", - first_attempt.command, - f"{msg} sent no lsid with {first_attempt.command_name}", - ) - initial_session_id = first_attempt.command["lsid"] - self.assertIn( - "txnNumber", - first_attempt.command, - f"{msg} sent no txnNumber with {first_attempt.command_name}", - ) - - # There should be no retry when the failpoint is not active. - if async_client_context.is_mongos or not async_client_context.test_commands_enabled: - self.assertEqual(len(commands_started), 1) - continue - - initial_transaction_id = first_attempt.command["txnNumber"] - retry_attempt = commands_started[1] - self.assertIn( - "lsid", - retry_attempt.command, - f"{msg} sent no lsid with {first_attempt.command_name}", - ) - self.assertEqual(retry_attempt.command["lsid"], initial_session_id, msg) - self.assertIn( - "txnNumber", - retry_attempt.command, - f"{msg} sent no txnNumber with {first_attempt.command_name}", - ) - self.assertEqual(retry_attempt.command["txnNumber"], initial_transaction_id, msg) - async def test_supported_single_statement_unsupported_cluster(self): if async_client_context.is_rs or async_client_context.is_mongos: raise SkipTest("This cluster supports retryable writes") diff --git a/test/retryable_writes/unified/aggregate-out-merge.json b/test/retryable_writes/unified/aggregate-out-merge.json new file mode 100644 index 0000000000..c46bf8c31f --- /dev/null +++ b/test/retryable_writes/unified/aggregate-out-merge.json @@ -0,0 +1,144 @@ +{ + "description": "aggregate with $out/$merge does not set txnNumber", + "schemaVersion": "1.3", + "runOnRequirements": [ + { + "minServerVersion": "3.6", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ] + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "retryable-writes-tests" + } + }, + { + "collection": { + "id": "collection0", + "database": "database0", + "collectionName": "coll0" + } + } + ], + "initialData": [ + { + "collectionName": "mergeCollection", + "databaseName": "retryable-writes-tests", + "documents": [] + } + ], + "tests": [ + { + "description": "aggregate with $out does not set txnNumber", + "operations": [ + { + "object": "collection0", + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$sort": { + "x": 1 + } + }, + { + "$match": { + "_id": { + "$gt": 1 + } + } + }, + { + "$out": "outCollection" + } + ] + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "commandName": "aggregate", + "command": { + "txnNumber": { + "$$exists": false + } + } + } + } + ] + } + ] + }, + { 
+ "description": "aggregate with $merge does not set txnNumber", + "runOnRequirements": [ + { + "minServerVersion": "4.1.11" + } + ], + "operations": [ + { + "object": "collection0", + "name": "aggregate", + "arguments": { + "pipeline": [ + { + "$sort": { + "x": 1 + } + }, + { + "$match": { + "_id": { + "$gt": 1 + } + } + }, + { + "$merge": { + "into": "mergeCollection" + } + } + ] + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "commandName": "aggregate", + "command": { + "txnNumber": { + "$$exists": false + } + } + } + } + ] + } + ] + } + ] +} diff --git a/test/retryable_writes/unified/bulkWrite.json b/test/retryable_writes/unified/bulkWrite.json index 691321746b..f2bd9e0eb8 100644 --- a/test/retryable_writes/unified/bulkWrite.json +++ b/test/retryable_writes/unified/bulkWrite.json @@ -13,7 +13,10 @@ { "client": { "id": "client0", - "useMultipleMongoses": false + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" + ] } }, { @@ -121,6 +124,53 @@ } ] } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "commandName": "insert", + "command": { + "txnNumber": { + "$$exists": true + } + } + } + }, + { + "commandStartedEvent": { + "commandName": "insert", + "command": { + "txnNumber": { + "$$exists": true + } + } + } + }, + { + "commandStartedEvent": { + "commandName": "update", + "command": { + "txnNumber": { + "$$exists": true + } + } + } + }, + { + "commandStartedEvent": { + "commandName": "delete", + "command": { + "txnNumber": { + "$$exists": true + } + } + } + } + ] + } ] }, { @@ -510,6 +560,33 @@ } ] } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "commandName": "insert", + "command": { + "txnNumber": { + "$$exists": true + } + } + } + }, + { + "commandStartedEvent": { + "commandName": "insert", + "command": { + "txnNumber": { + "$$exists": true + } + } + } + } + ] + } ] }, { @@ -926,6 +1003,81 @@ ] } ] + }, + { + "description": "collection bulkWrite with updateMany does not set txnNumber", + "operations": [ + { + "object": "collection0", + "name": "bulkWrite", + "arguments": { + "requests": [ + { + "updateMany": { + "filter": {}, + "update": { + "$set": { + "x": 1 + } + } + } + } + ] + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "commandName": "update", + "command": { + "txnNumber": { + "$$exists": false + } + } + } + } + ] + } + ] + }, + { + "description": "collection bulkWrite with deleteMany does not set txnNumber", + "operations": [ + { + "object": "collection0", + "name": "bulkWrite", + "arguments": { + "requests": [ + { + "deleteMany": { + "filter": {} + } + } + ] + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "commandName": "delete", + "command": { + "txnNumber": { + "$$exists": false + } + } + } + } + ] + } + ] } ] } diff --git a/test/retryable_writes/unified/client-bulkWrite-serverErrors.json b/test/retryable_writes/unified/client-bulkWrite-serverErrors.json index f58c82bcc7..a1f7c8152a 100644 --- a/test/retryable_writes/unified/client-bulkWrite-serverErrors.json +++ b/test/retryable_writes/unified/client-bulkWrite-serverErrors.json @@ -428,7 +428,10 @@ { "ns": "retryable-writes-tests.coll0" } - ] + ], + "txnNumber": { + "$$exists": false + } } } } @@ -779,7 +782,10 @@ { "ns": "retryable-writes-tests.coll0" } - ] + ], + "txnNumber": { + "$$exists": false + } } } } @@ -861,7 
+867,10 @@ { "ns": "retryable-writes-tests.coll0" } - ] + ], + "txnNumber": { + "$$exists": false + } } } } diff --git a/test/retryable_writes/unified/deleteMany.json b/test/retryable_writes/unified/deleteMany.json index 087576cc0f..381f377954 100644 --- a/test/retryable_writes/unified/deleteMany.json +++ b/test/retryable_writes/unified/deleteMany.json @@ -15,7 +15,10 @@ { "client": { "id": "client0", - "useMultipleMongoses": true + "useMultipleMongoses": true, + "observeEvents": [ + "commandStartedEvent" + ] } }, { @@ -70,6 +73,23 @@ "databaseName": "retryable-writes-tests", "documents": [] } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "commandName": "delete", + "command": { + "txnNumber": { + "$$exists": false + } + } + } + } + ] + } ] } ] diff --git a/test/retryable_writes/unified/deleteOne.json b/test/retryable_writes/unified/deleteOne.json index c3aaf88655..9e37ff8bcf 100644 --- a/test/retryable_writes/unified/deleteOne.json +++ b/test/retryable_writes/unified/deleteOne.json @@ -13,7 +13,10 @@ { "client": { "id": "client0", - "useMultipleMongoses": false + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" + ] } }, { @@ -88,6 +91,33 @@ } ] } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "commandName": "delete", + "command": { + "txnNumber": { + "$$exists": true + } + } + } + }, + { + "commandStartedEvent": { + "commandName": "delete", + "command": { + "txnNumber": { + "$$exists": true + } + } + } + } + ] + } ] }, { diff --git a/test/retryable_writes/unified/findOneAndDelete.json b/test/retryable_writes/unified/findOneAndDelete.json index 89dbb9d655..ebfb8ce665 100644 --- a/test/retryable_writes/unified/findOneAndDelete.json +++ b/test/retryable_writes/unified/findOneAndDelete.json @@ -13,7 +13,10 @@ { "client": { "id": "client0", - "useMultipleMongoses": false + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" + ] } }, { @@ -94,6 +97,33 @@ } ] } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "commandName": "findAndModify", + "command": { + "txnNumber": { + "$$exists": true + } + } + } + }, + { + "commandStartedEvent": { + "commandName": "findAndModify", + "command": { + "txnNumber": { + "$$exists": true + } + } + } + } + ] + } ] }, { diff --git a/test/retryable_writes/unified/findOneAndReplace.json b/test/retryable_writes/unified/findOneAndReplace.json index 6d1cc17974..638d15a41d 100644 --- a/test/retryable_writes/unified/findOneAndReplace.json +++ b/test/retryable_writes/unified/findOneAndReplace.json @@ -13,7 +13,10 @@ { "client": { "id": "client0", - "useMultipleMongoses": false + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" + ] } }, { @@ -98,6 +101,33 @@ } ] } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "commandName": "findAndModify", + "command": { + "txnNumber": { + "$$exists": true + } + } + } + }, + { + "commandStartedEvent": { + "commandName": "findAndModify", + "command": { + "txnNumber": { + "$$exists": true + } + } + } + } + ] + } ] }, { diff --git a/test/retryable_writes/unified/findOneAndUpdate.json b/test/retryable_writes/unified/findOneAndUpdate.json index eb88fbe9b3..eefe98ae11 100644 --- a/test/retryable_writes/unified/findOneAndUpdate.json +++ b/test/retryable_writes/unified/findOneAndUpdate.json @@ -13,7 +13,10 @@ { "client": { "id": "client0", - "useMultipleMongoses": false 
+ "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" + ] } }, { @@ -99,6 +102,33 @@ } ] } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "commandName": "findAndModify", + "command": { + "txnNumber": { + "$$exists": true + } + } + } + }, + { + "commandStartedEvent": { + "commandName": "findAndModify", + "command": { + "txnNumber": { + "$$exists": true + } + } + } + } + ] + } ] }, { diff --git a/test/retryable_writes/unified/insertMany.json b/test/retryable_writes/unified/insertMany.json index 47181d0a9e..35a18c46c6 100644 --- a/test/retryable_writes/unified/insertMany.json +++ b/test/retryable_writes/unified/insertMany.json @@ -13,7 +13,10 @@ { "client": { "id": "client0", - "useMultipleMongoses": false + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" + ] } }, { @@ -107,6 +110,33 @@ } ] } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "commandName": "insert", + "command": { + "txnNumber": { + "$$exists": true + } + } + } + }, + { + "commandStartedEvent": { + "commandName": "insert", + "command": { + "txnNumber": { + "$$exists": true + } + } + } + } + ] + } ] }, { @@ -172,6 +202,33 @@ } ] } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "commandName": "insert", + "command": { + "txnNumber": { + "$$exists": true + } + } + } + }, + { + "commandStartedEvent": { + "commandName": "insert", + "command": { + "txnNumber": { + "$$exists": true + } + } + } + } + ] + } ] }, { diff --git a/test/retryable_writes/unified/insertOne.json b/test/retryable_writes/unified/insertOne.json index 61957415ed..a6afdbf224 100644 --- a/test/retryable_writes/unified/insertOne.json +++ b/test/retryable_writes/unified/insertOne.json @@ -13,7 +13,10 @@ { "client": { "id": "client0", - "useMultipleMongoses": false + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" + ] } }, { @@ -101,6 +104,33 @@ } ] } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "commandName": "insert", + "command": { + "txnNumber": { + "$$exists": true + } + } + } + }, + { + "commandStartedEvent": { + "commandName": "insert", + "command": { + "txnNumber": { + "$$exists": true + } + } + } + } + ] + } ] }, { diff --git a/test/retryable_writes/unified/replaceOne.json b/test/retryable_writes/unified/replaceOne.json index e58625bb5e..ee6e37d3bb 100644 --- a/test/retryable_writes/unified/replaceOne.json +++ b/test/retryable_writes/unified/replaceOne.json @@ -13,7 +13,10 @@ { "client": { "id": "client0", - "useMultipleMongoses": false + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" + ] } }, { @@ -98,6 +101,33 @@ } ] } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "commandName": "update", + "command": { + "txnNumber": { + "$$exists": true + } + } + } + }, + { + "commandStartedEvent": { + "commandName": "update", + "command": { + "txnNumber": { + "$$exists": true + } + } + } + } + ] + } ] }, { diff --git a/test/retryable_writes/unified/unacknowledged-write-concern.json b/test/retryable_writes/unified/unacknowledged-write-concern.json new file mode 100644 index 0000000000..eaa114acfd --- /dev/null +++ b/test/retryable_writes/unified/unacknowledged-write-concern.json @@ -0,0 +1,77 @@ +{ + "description": "unacknowledged write does not set txnNumber", + "schemaVersion": "1.3", + "runOnRequirements": [ + { 
+ "minServerVersion": "3.6", + "topologies": [ + "replicaset", + "sharded", + "load-balanced" + ] + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "retryable-writes-tests" + } + }, + { + "collection": { + "id": "collection0", + "database": "database0", + "collectionName": "coll0", + "collectionOptions": { + "writeConcern": { + "w": 0 + } + } + } + } + ], + "tests": [ + { + "description": "unacknowledged write does not set txnNumber", + "operations": [ + { + "object": "collection0", + "name": "insertOne", + "arguments": { + "document": { + "_id": 1, + "x": 11 + } + } + } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "commandName": "insert", + "command": { + "txnNumber": { + "$$exists": false + } + } + } + } + ] + } + ] + } + ] +} diff --git a/test/retryable_writes/unified/updateMany.json b/test/retryable_writes/unified/updateMany.json index 260b7ad1c6..12c5204ee9 100644 --- a/test/retryable_writes/unified/updateMany.json +++ b/test/retryable_writes/unified/updateMany.json @@ -15,7 +15,10 @@ { "client": { "id": "client0", - "useMultipleMongoses": true + "useMultipleMongoses": true, + "observeEvents": [ + "commandStartedEvent" + ] } }, { @@ -86,6 +89,23 @@ } ] } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "commandName": "update", + "command": { + "txnNumber": { + "$$exists": false + } + } + } + } + ] + } ] } ] diff --git a/test/retryable_writes/unified/updateOne.json b/test/retryable_writes/unified/updateOne.json index 7947cef3c0..99ffba8e21 100644 --- a/test/retryable_writes/unified/updateOne.json +++ b/test/retryable_writes/unified/updateOne.json @@ -13,7 +13,10 @@ { "client": { "id": "client0", - "useMultipleMongoses": false + "useMultipleMongoses": false, + "observeEvents": [ + "commandStartedEvent" + ] } }, { @@ -99,6 +102,33 @@ } ] } + ], + "expectEvents": [ + { + "client": "client0", + "events": [ + { + "commandStartedEvent": { + "commandName": "update", + "command": { + "txnNumber": { + "$$exists": true + } + } + } + }, + { + "commandStartedEvent": { + "commandName": "update", + "command": { + "txnNumber": { + "$$exists": true + } + } + } + } + ] + } ] }, { diff --git a/test/test_retryable_writes.py b/test/test_retryable_writes.py index 5df6c41f7a..74f3c23e51 100644 --- a/test/test_retryable_writes.py +++ b/test/test_retryable_writes.py @@ -1,4 +1,4 @@ -# Copyright 2017 MongoDB, Inc. +# Copyright 2017-present MongoDB, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -65,7 +65,6 @@ UpdateMany, UpdateOne, ) -from pymongo.synchronous.mongo_client import MongoClient from pymongo.write_concern import WriteConcern _IS_SYNC = True @@ -226,47 +225,6 @@ def test_supported_single_statement_no_retry(self): f"{msg} sent txnNumber with {event.command_name}", ) - @client_context.require_no_standalone - def test_supported_single_statement_supported_cluster(self): - for method, args, kwargs in retryable_single_statement_ops(self.db.retryable_write_test): - msg = f"{method.__name__}(*{args!r}, **{kwargs!r})" - self.listener.reset() - method(*args, **kwargs) - commands_started = self.listener.started_events - self.assertEqual(len(self.listener.succeeded_events), 1, msg) - first_attempt = commands_started[0] - self.assertIn( - "lsid", - first_attempt.command, - f"{msg} sent no lsid with {first_attempt.command_name}", - ) - initial_session_id = first_attempt.command["lsid"] - self.assertIn( - "txnNumber", - first_attempt.command, - f"{msg} sent no txnNumber with {first_attempt.command_name}", - ) - - # There should be no retry when the failpoint is not active. - if client_context.is_mongos or not client_context.test_commands_enabled: - self.assertEqual(len(commands_started), 1) - continue - - initial_transaction_id = first_attempt.command["txnNumber"] - retry_attempt = commands_started[1] - self.assertIn( - "lsid", - retry_attempt.command, - f"{msg} sent no lsid with {first_attempt.command_name}", - ) - self.assertEqual(retry_attempt.command["lsid"], initial_session_id, msg) - self.assertIn( - "txnNumber", - retry_attempt.command, - f"{msg} sent no txnNumber with {first_attempt.command_name}", - ) - self.assertEqual(retry_attempt.command["txnNumber"], initial_transaction_id, msg) - def test_supported_single_statement_unsupported_cluster(self): if client_context.is_rs or client_context.is_mongos: raise SkipTest("This cluster supports retryable writes") From a3bdc133ca497c2e966e41a283c1b712d045f7fe Mon Sep 17 00:00:00 2001 From: Shane Harvey Date: Mon, 18 Nov 2024 12:17:33 -0800 Subject: [PATCH 107/182] PYTHON-4356 Unskip spec tests for agg $out (#2008) --- test/asynchronous/unified_format.py | 9 --------- test/unified_format.py | 9 --------- 2 files changed, 18 deletions(-) diff --git a/test/asynchronous/unified_format.py b/test/asynchronous/unified_format.py index 81feed4d4c..ea61ecbe99 100644 --- a/test/asynchronous/unified_format.py +++ b/test/asynchronous/unified_format.py @@ -545,15 +545,6 @@ def maybe_skip_test(self, spec): or "Cancel server check" in spec["description"] ): self.skipTest("MMAPv1 does not support retryWrites=True") - if ( - "AsyncDatabase-level aggregate with $out includes read preference for 5.0+ server" - in spec["description"] - ): - if async_client_context.version[0] == 8: - self.skipTest("waiting on PYTHON-4356") - if "Aggregate with $out includes read preference for 5.0+ server" in spec["description"]: - if async_client_context.version[0] == 8: - self.skipTest("waiting on PYTHON-4356") if "Client side error in command starting transaction" in spec["description"]: self.skipTest("Implement PYTHON-1894") if "timeoutMS applied to entire download" in spec["description"]: diff --git a/test/unified_format.py b/test/unified_format.py index 395d40b2d1..1bcd750aef 100644 --- a/test/unified_format.py +++ b/test/unified_format.py @@ -543,15 +543,6 @@ def maybe_skip_test(self, spec): or "Cancel server check" in spec["description"] ): self.skipTest("MMAPv1 does not support retryWrites=True") - if ( - "Database-level aggregate with $out includes read 
preference for 5.0+ server" - in spec["description"] - ): - if client_context.version[0] == 8: - self.skipTest("waiting on PYTHON-4356") - if "Aggregate with $out includes read preference for 5.0+ server" in spec["description"]: - if client_context.version[0] == 8: - self.skipTest("waiting on PYTHON-4356") if "Client side error in command starting transaction" in spec["description"]: self.skipTest("Implement PYTHON-1894") if "timeoutMS applied to entire download" in spec["description"]: From a7c1090056c12ec9c492451917177954274daa59 Mon Sep 17 00:00:00 2001 From: Shane Harvey Date: Tue, 19 Nov 2024 10:46:57 -0800 Subject: [PATCH 108/182] PYTHON-4414 interruptInUseConnections should cancel pending connections too (#2010) --- pymongo/asynchronous/pool.py | 10 ++++++++++ pymongo/synchronous/pool.py | 10 ++++++++++ test/test_connection_monitoring.py | 5 ----- 3 files changed, 20 insertions(+), 5 deletions(-) diff --git a/pymongo/asynchronous/pool.py b/pymongo/asynchronous/pool.py index a9f02d650a..ca0cebd417 100644 --- a/pymongo/asynchronous/pool.py +++ b/pymongo/asynchronous/pool.py @@ -1249,6 +1249,9 @@ async def connect(self, handler: Optional[_MongoClientErrorHandler] = None) -> A async with self.lock: conn_id = self.next_connection_id self.next_connection_id += 1 + # Use a temporary context so that interrupt_connections can cancel creating the socket. + tmp_context = _CancellationContext() + self.active_contexts.add(tmp_context) listeners = self.opts._event_listeners if self.enabled_for_cmap: @@ -1267,6 +1270,8 @@ async def connect(self, handler: Optional[_MongoClientErrorHandler] = None) -> A try: sock = await _configured_socket(self.address, self.opts) except BaseException as error: + async with self.lock: + self.active_contexts.discard(tmp_context) if self.enabled_for_cmap: assert listeners is not None listeners.publish_connection_closed( @@ -1292,6 +1297,9 @@ async def connect(self, handler: Optional[_MongoClientErrorHandler] = None) -> A conn = AsyncConnection(sock, self, self.address, conn_id) # type: ignore[arg-type] async with self.lock: self.active_contexts.add(conn.cancel_context) + self.active_contexts.discard(tmp_context) + if tmp_context.cancelled: + conn.cancel_context.cancel() try: if self.handshake: await conn.hello() @@ -1301,6 +1309,8 @@ async def connect(self, handler: Optional[_MongoClientErrorHandler] = None) -> A await conn.authenticate() except BaseException: + async with self.lock: + self.active_contexts.discard(conn.cancel_context) conn.close_conn(ConnectionClosedReason.ERROR) raise diff --git a/pymongo/synchronous/pool.py b/pymongo/synchronous/pool.py index eb007a3471..86baf15b9a 100644 --- a/pymongo/synchronous/pool.py +++ b/pymongo/synchronous/pool.py @@ -1243,6 +1243,9 @@ def connect(self, handler: Optional[_MongoClientErrorHandler] = None) -> Connect with self.lock: conn_id = self.next_connection_id self.next_connection_id += 1 + # Use a temporary context so that interrupt_connections can cancel creating the socket. 
+ tmp_context = _CancellationContext() + self.active_contexts.add(tmp_context) listeners = self.opts._event_listeners if self.enabled_for_cmap: @@ -1261,6 +1264,8 @@ def connect(self, handler: Optional[_MongoClientErrorHandler] = None) -> Connect try: sock = _configured_socket(self.address, self.opts) except BaseException as error: + with self.lock: + self.active_contexts.discard(tmp_context) if self.enabled_for_cmap: assert listeners is not None listeners.publish_connection_closed( @@ -1286,6 +1291,9 @@ def connect(self, handler: Optional[_MongoClientErrorHandler] = None) -> Connect conn = Connection(sock, self, self.address, conn_id) # type: ignore[arg-type] with self.lock: self.active_contexts.add(conn.cancel_context) + self.active_contexts.discard(tmp_context) + if tmp_context.cancelled: + conn.cancel_context.cancel() try: if self.handshake: conn.hello() @@ -1295,6 +1303,8 @@ def connect(self, handler: Optional[_MongoClientErrorHandler] = None) -> Connect conn.authenticate() except BaseException: + with self.lock: + self.active_contexts.discard(conn.cancel_context) conn.close_conn(ConnectionClosedReason.ERROR) raise diff --git a/test/test_connection_monitoring.py b/test/test_connection_monitoring.py index d576a1184a..05411d17ba 100644 --- a/test/test_connection_monitoring.py +++ b/test/test_connection_monitoring.py @@ -216,11 +216,6 @@ def set_fail_point(self, command_args): def run_scenario(self, scenario_def, test): """Run a CMAP spec test.""" - if ( - scenario_def["description"] - == "clear with interruptInUseConnections = true closes pending connections" - ): - self.skipTest("Skip pending PYTHON-4414") self.logs: list = [] self.assertEqual(scenario_def["version"], 1) self.assertIn(scenario_def["style"], ["unit", "integration"]) From ddf783b69a400411db2bee155052f648396c3c7f Mon Sep 17 00:00:00 2001 From: Shane Harvey Date: Tue, 19 Nov 2024 15:43:17 -0800 Subject: [PATCH 109/182] PYTHON-4982 Remove redundant configureFailPoint (#2012) --- test/asynchronous/test_retryable_reads.py | 5 ++--- test/test_retryable_reads.py | 5 ++--- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/test/asynchronous/test_retryable_reads.py b/test/asynchronous/test_retryable_reads.py index b2d86f5d84..bde7a9f2ee 100644 --- a/test/asynchronous/test_retryable_reads.py +++ b/test/asynchronous/test_retryable_reads.py @@ -174,9 +174,8 @@ async def test_retryable_reads_in_sharded_cluster_multiple_available(self): retryReads=True, ) - async with self.fail_point(fail_command): - with self.assertRaises(AutoReconnect): - await client.t.t.find_one({}) + with self.assertRaises(AutoReconnect): + await client.t.t.find_one({}) # Disable failpoints on each mongos for client in mongos_clients: diff --git a/test/test_retryable_reads.py b/test/test_retryable_reads.py index d4951db5ee..9c3f6b170f 100644 --- a/test/test_retryable_reads.py +++ b/test/test_retryable_reads.py @@ -174,9 +174,8 @@ def test_retryable_reads_in_sharded_cluster_multiple_available(self): retryReads=True, ) - with self.fail_point(fail_command): - with self.assertRaises(AutoReconnect): - client.t.t.find_one({}) + with self.assertRaises(AutoReconnect): + client.t.t.find_one({}) # Disable failpoints on each mongos for client in mongos_clients: From b5f010404809b4a5770173837f467a307e9b084a Mon Sep 17 00:00:00 2001 From: Shane Harvey Date: Tue, 19 Nov 2024 15:43:28 -0800 Subject: [PATCH 110/182] PYTHON-4980 Ignore network error on killAllSessions (#2011) --- test/asynchronous/unified_format.py | 4 +++- test/asynchronous/utils_spec_runner.py | 5 
+++-- test/unified_format.py | 4 +++- test/utils_spec_runner.py | 5 +++-- 4 files changed, 12 insertions(+), 6 deletions(-) diff --git a/test/asynchronous/unified_format.py b/test/asynchronous/unified_format.py index ea61ecbe99..db5ed81e24 100644 --- a/test/asynchronous/unified_format.py +++ b/test/asynchronous/unified_format.py @@ -76,6 +76,7 @@ from pymongo.asynchronous.helpers import anext from pymongo.encryption_options import _HAVE_PYMONGOCRYPT from pymongo.errors import ( + AutoReconnect, BulkWriteError, ClientBulkWriteException, ConfigurationError, @@ -755,9 +756,10 @@ async def kill_all_sessions(self): for client in clients: try: await client.admin.command("killAllSessions", []) - except OperationFailure: + except (OperationFailure, AutoReconnect): # "operation was interrupted" by killing the command's # own session. + # On 8.0+ killAllSessions sometimes returns a network error. pass async def _databaseOperation_listCollections(self, target, *args, **kwargs): diff --git a/test/asynchronous/utils_spec_runner.py b/test/asynchronous/utils_spec_runner.py index 4d9c4c8f20..f27f52ec2c 100644 --- a/test/asynchronous/utils_spec_runner.py +++ b/test/asynchronous/utils_spec_runner.py @@ -46,7 +46,7 @@ from pymongo.asynchronous import client_session from pymongo.asynchronous.command_cursor import AsyncCommandCursor from pymongo.asynchronous.cursor import AsyncCursor -from pymongo.errors import BulkWriteError, OperationFailure, PyMongoError +from pymongo.errors import AutoReconnect, BulkWriteError, OperationFailure, PyMongoError from pymongo.read_concern import ReadConcern from pymongo.read_preferences import ReadPreference from pymongo.results import BulkWriteResult, _WriteResult @@ -343,9 +343,10 @@ async def kill_all_sessions(self): for client in clients: try: await client.admin.command("killAllSessions", []) - except OperationFailure: + except (OperationFailure, AutoReconnect): # "operation was interrupted" by killing the command's # own session. + # On 8.0+ killAllSessions sometimes returns a network error. pass def check_command_result(self, expected_result, result): diff --git a/test/unified_format.py b/test/unified_format.py index 1bcd750aef..3489a8ac84 100644 --- a/test/unified_format.py +++ b/test/unified_format.py @@ -69,6 +69,7 @@ from pymongo import ASCENDING, CursorType, MongoClient, _csot from pymongo.encryption_options import _HAVE_PYMONGOCRYPT from pymongo.errors import ( + AutoReconnect, BulkWriteError, ClientBulkWriteException, ConfigurationError, @@ -751,9 +752,10 @@ def kill_all_sessions(self): for client in clients: try: client.admin.command("killAllSessions", []) - except OperationFailure: + except (OperationFailure, AutoReconnect): # "operation was interrupted" by killing the command's # own session. + # On 8.0+ killAllSessions sometimes returns a network error. 
pass def _databaseOperation_listCollections(self, target, *args, **kwargs): diff --git a/test/utils_spec_runner.py b/test/utils_spec_runner.py index 8a061de0b1..8b2679d776 100644 --- a/test/utils_spec_runner.py +++ b/test/utils_spec_runner.py @@ -43,7 +43,7 @@ from bson.son import SON from gridfs import GridFSBucket from gridfs.synchronous.grid_file import GridFSBucket -from pymongo.errors import BulkWriteError, OperationFailure, PyMongoError +from pymongo.errors import AutoReconnect, BulkWriteError, OperationFailure, PyMongoError from pymongo.read_concern import ReadConcern from pymongo.read_preferences import ReadPreference from pymongo.results import BulkWriteResult, _WriteResult @@ -343,9 +343,10 @@ def kill_all_sessions(self): for client in clients: try: client.admin.command("killAllSessions", []) - except OperationFailure: + except (OperationFailure, AutoReconnect): # "operation was interrupted" by killing the command's # own session. + # On 8.0+ killAllSessions sometimes returns a network error. pass def check_command_result(self, expected_result, result): From 89f4e5c786d4439c92914614474306910ccc8142 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Wed, 20 Nov 2024 09:21:30 -0600 Subject: [PATCH 111/182] PYTHON-3730 Ensure C extensions when running the test suite (#2013) --- .evergreen/run-tests.sh | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.evergreen/run-tests.sh b/.evergreen/run-tests.sh index 36fa76e317..1e03e27147 100755 --- a/.evergreen/run-tests.sh +++ b/.evergreen/run-tests.sh @@ -47,6 +47,11 @@ else echo "Not sourcing secrets" fi +# Ensure C extensions have compiled. +if [ -z "${NO_EXT:-}" ]; then + python tools/fail_if_no_c.py +fi + if [ "$AUTH" != "noauth" ]; then if [ ! -z "$TEST_DATA_LAKE" ]; then export DB_USER="mhuser" From 906d021bb1a8b4c381ce79f943c5c57b4314ecb7 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Wed, 20 Nov 2024 11:56:10 -0600 Subject: [PATCH 112/182] PYTHON-4447 Test OIDC on Server Latest (#2014) --- .evergreen/config.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.evergreen/config.yml b/.evergreen/config.yml index fc1713a88e..1e4996c288 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -922,9 +922,6 @@ task_groups: params: binary: bash include_expansions_in_env: ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN"] - env: - # PYTHON-4447 - MONGODB_VERSION: "8.0" args: - ${DRIVERS_TOOLS}/.evergreen/auth_oidc/setup.sh teardown_task: From 1c7a7fe9ec3119228bc7bf98f1f9de199a4f8f2c Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Wed, 20 Nov 2024 14:47:28 -0500 Subject: [PATCH 113/182] PYTHON-4721 - Create individualized scripts for all shell.exec commands (#1997) Co-authored-by: Jib --- .evergreen/config.yml | 710 ++++++++---------- .evergreen/hatch.sh | 2 +- .evergreen/run-tests.sh | 8 +- .evergreen/scripts/archive-mongodb-logs.sh | 8 + .../scripts/bootstrap-mongo-orchestration.sh | 46 ++ .evergreen/scripts/check-import-time.sh | 7 + .evergreen/scripts/cleanup.sh | 7 + .evergreen/scripts/configure-env.sh | 19 +- .../scripts/download-and-merge-coverage.sh | 4 + .evergreen/scripts/fix-absolute-paths.sh | 8 + .evergreen/scripts/init-test-results.sh | 5 + .evergreen/scripts/install-dependencies.sh | 6 + .evergreen/scripts/make-files-executable.sh | 8 + .evergreen/scripts/prepare-resources.sh | 12 + .evergreen/scripts/run-atlas-tests.sh | 7 + .evergreen/scripts/run-aws-ecs-auth-test.sh | 15 + .evergreen/scripts/run-doctests.sh | 4 + .../scripts/run-enterprise-auth-tests.sh | 6 + 
.evergreen/scripts/run-gcpkms-fail-test.sh | 7 + .evergreen/scripts/run-getdata.sh | 22 + .evergreen/scripts/run-load-balancer.sh | 3 + .evergreen/scripts/run-mockupdb-tests.sh | 5 + .../{ => scripts}/run-mod-wsgi-tests.sh | 2 +- .../{ => scripts}/run-mongodb-aws-test.sh | 10 +- .evergreen/scripts/run-ocsp-test.sh | 8 + .evergreen/scripts/run-perf-tests.sh | 4 + .evergreen/scripts/run-tests.sh | 55 ++ .evergreen/scripts/run-with-env.sh | 21 + .evergreen/scripts/setup-encryption.sh | 5 + .evergreen/scripts/setup-tests.sh | 27 + .evergreen/scripts/stop-load-balancer.sh | 5 + .evergreen/scripts/teardown-aws.sh | 7 + .evergreen/scripts/teardown-docker.sh | 7 + .evergreen/scripts/upload-coverage-report.sh | 3 + .evergreen/scripts/windows-fix.sh | 11 + .evergreen/setup-encryption.sh | 7 +- .evergreen/utils.sh | 4 +- test/asynchronous/test_client_context.py | 8 +- test/mod_wsgi_test/README.rst | 2 +- test/test_client_context.py | 8 +- 40 files changed, 683 insertions(+), 430 deletions(-) create mode 100644 .evergreen/scripts/archive-mongodb-logs.sh create mode 100644 .evergreen/scripts/bootstrap-mongo-orchestration.sh create mode 100644 .evergreen/scripts/check-import-time.sh create mode 100644 .evergreen/scripts/cleanup.sh create mode 100644 .evergreen/scripts/download-and-merge-coverage.sh create mode 100644 .evergreen/scripts/fix-absolute-paths.sh create mode 100644 .evergreen/scripts/init-test-results.sh create mode 100644 .evergreen/scripts/install-dependencies.sh create mode 100644 .evergreen/scripts/make-files-executable.sh create mode 100644 .evergreen/scripts/prepare-resources.sh create mode 100644 .evergreen/scripts/run-atlas-tests.sh create mode 100644 .evergreen/scripts/run-aws-ecs-auth-test.sh create mode 100644 .evergreen/scripts/run-doctests.sh create mode 100644 .evergreen/scripts/run-enterprise-auth-tests.sh create mode 100644 .evergreen/scripts/run-gcpkms-fail-test.sh create mode 100644 .evergreen/scripts/run-getdata.sh create mode 100644 .evergreen/scripts/run-load-balancer.sh create mode 100644 .evergreen/scripts/run-mockupdb-tests.sh rename .evergreen/{ => scripts}/run-mod-wsgi-tests.sh (97%) rename .evergreen/{ => scripts}/run-mongodb-aws-test.sh (67%) create mode 100644 .evergreen/scripts/run-ocsp-test.sh create mode 100644 .evergreen/scripts/run-perf-tests.sh create mode 100644 .evergreen/scripts/run-tests.sh create mode 100644 .evergreen/scripts/run-with-env.sh create mode 100644 .evergreen/scripts/setup-encryption.sh create mode 100644 .evergreen/scripts/setup-tests.sh create mode 100644 .evergreen/scripts/stop-load-balancer.sh create mode 100644 .evergreen/scripts/teardown-aws.sh create mode 100644 .evergreen/scripts/teardown-docker.sh create mode 100644 .evergreen/scripts/upload-coverage-report.sh create mode 100644 .evergreen/scripts/windows-fix.sh diff --git a/.evergreen/config.yml b/.evergreen/config.yml index 1e4996c288..59b8a543fd 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -20,10 +20,9 @@ exec_timeout_secs: 3600 # 60 minutes is the longest we'll ever run (primarily # What to do when evergreen hits the timeout (`post:` tasks are run automatically) timeout: - - command: shell.exec + - command: subprocess.exec params: - script: | - ls -la + binary: ls -la include: - filename: .evergreen/generated_configs/tasks.yml @@ -41,7 +40,7 @@ functions: # Make an evergreen expansion file with dynamic values - command: subprocess.exec params: - include_expansions_in_env: ["is_patch", "project", "version_id"] + include_expansions_in_env: ["is_patch", 
"project", "version_id", "AUTH", "SSL", "test_encryption", "test_encryption_pyopenssl", "test_crypt_shared", "test_pyopenssl", "SETDEFAULTENCODING", "test_loadbalancer", "test_serverless", "SKIP_CSOT_TESTS", "MONGODB_STARTED", "DISABLE_TEST_COMMANDS", "GREEN_FRAMEWORK", "NO_EXT", "COVERAGE", "COMPRESSORS", "TEST_SUITES", "MONGODB_API_VERSION", "SKIP_HATCH", "skip_crypt_shared", "VERSION", "TOPOLOGY", "STORAGE_ENGINE", "ORCHESTRATION_FILE", "REQUIRE_API_VERSION", "LOAD_BALANCER", "skip_web_identity_auth_test", "skip_ECS_auth_test"] binary: bash working_dir: "src" args: @@ -52,19 +51,11 @@ functions: file: src/expansion.yml "prepare resources": - - command: shell.exec - params: - script: | - . src/.evergreen/scripts/env.sh - set -o xtrace - rm -rf $DRIVERS_TOOLS - if [ "$PROJECT" = "drivers-tools" ]; then - # If this was a patch build, doing a fresh clone would not actually test the patch - cp -R ${PROJECT_DIRECTORY}/ ${DRIVERS_TOOLS} - else - git clone https://github.com/mongodb-labs/drivers-evergreen-tools.git ${DRIVERS_TOOLS} - fi - echo "{ \"releases\": { \"default\": \"$MONGODB_BINARIES\" }}" > $MONGO_ORCHESTRATION_HOME/orchestration.config + - command: subprocess.exec + params: + binary: bash + args: + - src/.evergreen/scripts/prepare-resources.sh "upload coverage" : - command: ec2.assume_role @@ -88,14 +79,17 @@ functions: - command: ec2.assume_role params: role_arn: ${assume_role_arn} - - command: shell.exec + - command: subprocess.exec params: silent: true + binary: bash working_dir: "src" include_expansions_in_env: ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN"] - script: | - # Download all the task coverage files. - aws s3 cp --recursive s3://${bucket_name}/coverage/${revision}/${version_id}/coverage/ coverage/ + args: + - .evergreen/scripts/download-and-merge-coverage.sh + - ${bucket_name} + - ${revision} + - ${version_id} - command: subprocess.exec params: working_dir: "src" @@ -103,13 +97,17 @@ functions: args: - .evergreen/combine-coverage.sh # Upload the resulting html coverage report. - - command: shell.exec + - command: subprocess.exec params: silent: true + binary: bash working_dir: "src" include_expansions_in_env: ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN"] - script: | - aws s3 cp htmlcov/ s3://${bucket_name}/coverage/${revision}/${version_id}/htmlcov/ --recursive --acl public-read --region us-east-1 + args: + - .evergreen/scripts/upload-coverage-report.sh + - ${bucket_name} + - ${revision} + - ${version_id} # Attach the index.html with s3.put so it shows up in the Evergreen UI. - command: s3.put params: @@ -128,15 +126,6 @@ functions: - command: ec2.assume_role params: role_arn: ${assume_role_arn} - - command: shell.exec - params: - script: | - . src/.evergreen/scripts/env.sh - set -o xtrace - mkdir out_dir - find $MONGO_ORCHESTRATION_HOME -name \*.log -exec sh -c 'x="{}"; mv $x $PWD/out_dir/$(basename $(dirname $x))_$(basename $x)' \; - tar zcvf mongodb-logs.tar.gz -C out_dir/ . 
- rm -rf out_dir - command: archive.targz_pack params: target: "mongo-coredumps.tgz" @@ -161,23 +150,12 @@ functions: aws_key: ${AWS_ACCESS_KEY_ID} aws_secret: ${AWS_SECRET_ACCESS_KEY} aws_session_token: ${AWS_SESSION_TOKEN} - local_file: mongodb-logs.tar.gz - remote_file: ${build_variant}/${revision}/${version_id}/${build_id}/logs/${task_id}-${execution}-mongodb-logs.tar.gz + local_file: ${DRIVERS_TOOLS}/.evergreen/test_logs.tar.gz + remote_file: ${build_variant}/${revision}/${version_id}/${build_id}/logs/${task_id}-${execution}-drivers-tools-logs.tar.gz bucket: ${bucket_name} permissions: public-read content_type: ${content_type|application/x-gzip} - display_name: "mongodb-logs.tar.gz" - - command: s3.put - params: - aws_key: ${AWS_ACCESS_KEY_ID} - aws_secret: ${AWS_SECRET_ACCESS_KEY} - aws_session_token: ${AWS_SESSION_TOKEN} - local_file: drivers-tools/.evergreen/orchestration/server.log - remote_file: ${build_variant}/${revision}/${version_id}/${build_id}/logs/${task_id}-${execution}-orchestration.log - bucket: ${bucket_name} - permissions: public-read - content_type: ${content_type|text/plain} - display_name: "orchestration.log" + display_name: "drivers-tools-logs.tar.gz" "upload working dir": - command: ec2.assume_role @@ -230,54 +208,13 @@ functions: file: "src/xunit-results/TEST-*.xml" "bootstrap mongo-orchestration": - - command: shell.exec - params: - script: | - . src/.evergreen/scripts/env.sh - set -o xtrace - - # Enable core dumps if enabled on the machine - # Copied from https://github.com/mongodb/mongo/blob/master/etc/evergreen.yml - if [ -f /proc/self/coredump_filter ]; then - # Set the shell process (and its children processes) to dump ELF headers (bit 4), - # anonymous shared mappings (bit 1), and anonymous private mappings (bit 0). - echo 0x13 > /proc/self/coredump_filter - - if [ -f /sbin/sysctl ]; then - # Check that the core pattern is set explicitly on our distro image instead - # of being the OS's default value. This ensures that coredump names are consistent - # across distros and can be picked up by Evergreen. - core_pattern=$(/sbin/sysctl -n "kernel.core_pattern") - if [ "$core_pattern" = "dump_%e.%p.core" ]; then - echo "Enabling coredumps" - ulimit -c unlimited - fi - fi - fi - - if [ $(uname -s) = "Darwin" ]; then - core_pattern_mac=$(/usr/sbin/sysctl -n "kern.corefile") - if [ "$core_pattern_mac" = "dump_%N.%P.core" ]; then - echo "Enabling coredumps" - ulimit -c unlimited - fi - fi - - if [ -n "${skip_crypt_shared}" ]; then - export SKIP_CRYPT_SHARED=1 - fi - - MONGODB_VERSION=${VERSION} \ - TOPOLOGY=${TOPOLOGY} \ - AUTH=${AUTH} \ - SSL=${SSL} \ - STORAGE_ENGINE=${STORAGE_ENGINE} \ - DISABLE_TEST_COMMANDS=${DISABLE_TEST_COMMANDS} \ - ORCHESTRATION_FILE=${ORCHESTRATION_FILE} \ - REQUIRE_API_VERSION=${REQUIRE_API_VERSION} \ - LOAD_BALANCER=${LOAD_BALANCER} \ - bash ${DRIVERS_TOOLS}/.evergreen/run-orchestration.sh - # run-orchestration generates expansion file with the MONGODB_URI for the cluster + - command: subprocess.exec + params: + binary: bash + include_expansions_in_env: ["VERSION", "TOPOLOGY", "AUTH", "SSL", "ORCHESTRATION_FILE", "LOAD_BALANCER"] + args: + - src/.evergreen/scripts/run-with-env.sh + - src/.evergreen/scripts/bootstrap-mongo-orchestration.sh - command: expansions.update params: file: mo-expansion.yml @@ -288,167 +225,107 @@ functions: value: "1" "bootstrap data lake": - - command: shell.exec + - command: subprocess.exec type: setup params: - script: | - . 
src/.evergreen/scripts/env.sh - bash ${DRIVERS_TOOLS}/.evergreen/atlas_data_lake/pull-mongohouse-image.sh - - command: shell.exec + binary: bash + args: + - ${DRIVERS_TOOLS}/.evergreen/atlas_data_lake/pull-mongohouse-image.sh + - command: subprocess.exec type: setup params: - script: | - . src/.evergreen/scripts/env.sh - bash ${DRIVERS_TOOLS}/.evergreen/atlas_data_lake/run-mongohouse-image.sh - sleep 1 - docker ps + binary: bash + args: + - ${DRIVERS_TOOLS}/.evergreen/atlas_data_lake/run-mongohouse-image.sh "stop mongo-orchestration": - - command: shell.exec + - command: subprocess.exec params: - script: | - . src/.evergreen/scripts/env.sh - set -o xtrace - bash ${DRIVERS_TOOLS}/.evergreen/stop-orchestration.sh + binary: bash + args: + - ${DRIVERS_TOOLS}/.evergreen/stop-orchestration.sh "run mod_wsgi tests": - - command: shell.exec + - command: subprocess.exec type: test params: + include_expansions_in_env: [MOD_WSGI_VERSION, MOD_WSGI_EMBEDDED, "PYTHON_BINARY"] working_dir: "src" - script: | - . .evergreen/scripts/env.sh - set -o xtrace - PYTHON_BINARY=${PYTHON_BINARY} MOD_WSGI_VERSION=${MOD_WSGI_VERSION} \ - MOD_WSGI_EMBEDDED=${MOD_WSGI_EMBEDDED} PROJECT_DIRECTORY=${PROJECT_DIRECTORY} \ - bash ${PROJECT_DIRECTORY}/.evergreen/run-mod-wsgi-tests.sh + binary: bash + args: + - .evergreen/scripts/run-with-env.sh + - .evergreen/scripts/run-mod-wsgi-tests.sh "run mockupdb tests": - - command: shell.exec + - command: subprocess.exec type: test params: + include_expansions_in_env: ["PYTHON_BINARY"] working_dir: "src" - script: | - . .evergreen/scripts/env.sh - set -o xtrace - export PYTHON_BINARY=${PYTHON_BINARY} - bash ${PROJECT_DIRECTORY}/.evergreen/hatch.sh test:test-mockupdb + binary: bash + args: + - .evergreen/scripts/run-with-env.sh + - .evergreen/scripts/run-mockupdb-tests.sh "run doctests": - - command: shell.exec + - command: subprocess.exec type: test params: + include_expansions_in_env: [ "PYTHON_BINARY" ] working_dir: "src" - script: | - . .evergreen/scripts/env.sh - set -o xtrace - PYTHON_BINARY=${PYTHON_BINARY} bash ${PROJECT_DIRECTORY}/.evergreen/hatch.sh doctest:test + binary: bash + args: + - .evergreen/scripts/run-with-env.sh + - .evergreen/scripts/run-doctests.sh "run tests": - - command: shell.exec + - command: subprocess.exec params: + include_expansions_in_env: ["TEST_DATA_LAKE", "AUTH", "SSL", "TEST_INDEX_MANAGEMENT", "CRYPT_SHARED_LIB_PATH", "test_encryption", "test_encryption_pyopenssl", "test_crypt_shared", "test_pyopenssl", "test_loadbalancer", "test_serverless", "ORCHESTRATION_FILE"] + binary: bash working_dir: "src" - shell: bash - background: true - include_expansions_in_env: ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN"] - script: | - . .evergreen/scripts/env.sh - if [ -n "${test_encryption}" ]; then - ./.evergreen/hatch.sh encryption:setup - fi - - command: shell.exec + args: + - .evergreen/scripts/setup-tests.sh + - command: subprocess.exec + params: + working_dir: "src" + binary: bash + background: true + include_expansions_in_env: ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN"] + args: + - .evergreen/scripts/run-with-env.sh + - .evergreen/scripts/setup-encryption.sh + - command: subprocess.exec type: test params: working_dir: "src" - shell: bash - script: | - # Disable xtrace - set +x - . 
.evergreen/scripts/env.sh - if [ -n "${MONGODB_STARTED}" ]; then - export PYMONGO_MUST_CONNECT=true - fi - if [ -n "${DISABLE_TEST_COMMANDS}" ]; then - export PYMONGO_DISABLE_TEST_COMMANDS=1 - fi - if [ -n "${test_encryption}" ]; then - # Disable xtrace (just in case it was accidentally set). - set +x - bash ${DRIVERS_TOOLS}/.evergreen/csfle/await-servers.sh - export TEST_ENCRYPTION=1 - if [ -n "${test_encryption_pyopenssl}" ]; then - export TEST_ENCRYPTION_PYOPENSSL=1 - fi - fi - if [ -n "${test_crypt_shared}" ]; then - export TEST_CRYPT_SHARED=1 - export CRYPT_SHARED_LIB_PATH=${CRYPT_SHARED_LIB_PATH} - fi - if [ -n "${test_pyopenssl}" ]; then - export TEST_PYOPENSSL=1 - fi - if [ -n "${SETDEFAULTENCODING}" ]; then - export SETDEFAULTENCODING="${SETDEFAULTENCODING}" - fi - if [ -n "${test_loadbalancer}" ]; then - export TEST_LOADBALANCER=1 - export SINGLE_MONGOS_LB_URI="${SINGLE_MONGOS_LB_URI}" - export MULTI_MONGOS_LB_URI="${MULTI_MONGOS_LB_URI}" - fi - if [ -n "${test_serverless}" ]; then - export TEST_SERVERLESS=1 - fi - if [ -n "${TEST_INDEX_MANAGEMENT}" ]; then - export TEST_INDEX_MANAGEMENT=1 - fi - if [ -n "${SKIP_CSOT_TESTS}" ]; then - export SKIP_CSOT_TESTS=1 - fi - - GREEN_FRAMEWORK=${GREEN_FRAMEWORK} \ - PYTHON_BINARY=${PYTHON_BINARY} \ - NO_EXT=${NO_EXT} \ - COVERAGE=${COVERAGE} \ - COMPRESSORS=${COMPRESSORS} \ - AUTH=${AUTH} \ - SSL=${SSL} \ - TEST_DATA_LAKE=${TEST_DATA_LAKE} \ - TEST_SUITES=${TEST_SUITES} \ - MONGODB_API_VERSION=${MONGODB_API_VERSION} \ - SKIP_HATCH=${SKIP_HATCH} \ - bash ${PROJECT_DIRECTORY}/.evergreen/hatch.sh test:test-eg + binary: bash + include_expansions_in_env: ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN", "PYTHON_BINARY", "TEST_DATA_LAKE", "TEST_INDEX_MANAGEMENT", "CRYPT_SHARED_LIB_PATH", "SINGLE_MONGOS_LB_URI", "MULTI_MONGOS_LB_URI", "TEST_SUITES"] + args: + - .evergreen/scripts/run-with-env.sh + - .evergreen/scripts/run-tests.sh "run enterprise auth tests": - - command: shell.exec + - command: subprocess.exec type: test params: + binary: bash working_dir: "src" - include_expansions_in_env: ["DRIVERS_TOOLS", "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN"] - script: | - # Disable xtrace for security reasons (just in case it was accidentally set). - set +x - bash ${DRIVERS_TOOLS}/.evergreen/auth_aws/setup_secrets.sh drivers/enterprise_auth - PROJECT_DIRECTORY="${PROJECT_DIRECTORY}" \ - PYTHON_BINARY="${PYTHON_BINARY}" \ - TEST_ENTERPRISE_AUTH=1 \ - AUTH=auth \ - bash ${PROJECT_DIRECTORY}/.evergreen/hatch.sh test:test-eg + include_expansions_in_env: ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN", "PYTHON_BINARY"] + args: + - .evergreen/scripts/run-with-env.sh + - .evergreen/scripts/run-enterprise-auth-tests.sh "run atlas tests": - - command: shell.exec + - command: subprocess.exec type: test params: - include_expansions_in_env: ["DRIVERS_TOOLS", "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN"] + binary: bash + include_expansions_in_env: ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN", "PYTHON_BINARY"] working_dir: "src" - script: | - # Disable xtrace for security reasons (just in case it was accidentally set). 
- set +x - set -o errexit - bash ${DRIVERS_TOOLS}/.evergreen/auth_aws/setup_secrets.sh drivers/atlas_connect - PROJECT_DIRECTORY="${PROJECT_DIRECTORY}" \ - PYTHON_BINARY="${PYTHON_BINARY}" \ - TEST_ATLAS=1 \ - bash ${PROJECT_DIRECTORY}/.evergreen/hatch.sh test:test-eg + args: + - .evergreen/scripts/run-with-env.sh + - .evergreen/scripts/run-atlas-tests.sh "get aws auth secrets": - command: subprocess.exec @@ -460,57 +337,140 @@ functions: - ${DRIVERS_TOOLS}/.evergreen/auth_aws/setup-secrets.sh "run aws auth test with regular aws credentials": - - command: shell.exec + - command: subprocess.exec + params: + include_expansions_in_env: ["TEST_DATA_LAKE", "TEST_INDEX_MANAGEMENT", "CRYPT_SHARED_LIB_PATH", "test_encryption", "test_encryption_pyopenssl", "test_crypt_shared", "test_pyopenssl", "test_loadbalancer", "test_serverless", "ORCHESTRATION_FILE"] + binary: bash + working_dir: "src" + args: + - .evergreen/scripts/setup-tests.sh + - command: subprocess.exec type: test params: - shell: "bash" + include_expansions_in_env: ["DRIVERS_TOOLS", "skip_EC2_auth_test"] + binary: bash working_dir: "src" - script: | - . .evergreen/scripts/env.sh - .evergreen/run-mongodb-aws-test.sh regular + args: + - .evergreen/scripts/run-with-env.sh + - .evergreen/scripts/run-mongodb-aws-test.sh + - regular "run aws auth test with assume role credentials": - - command: shell.exec + - command: subprocess.exec + params: + include_expansions_in_env: [ "TEST_DATA_LAKE", "TEST_INDEX_MANAGEMENT", "CRYPT_SHARED_LIB_PATH", "test_encryption", "test_encryption_pyopenssl", "test_crypt_shared", "test_pyopenssl", "test_loadbalancer", "test_serverless", "ORCHESTRATION_FILE" ] + binary: bash + working_dir: "src" + args: + - .evergreen/scripts/setup-tests.sh + - command: subprocess.exec type: test params: - shell: "bash" + include_expansions_in_env: ["DRIVERS_TOOLS", "skip_EC2_auth_test"] + binary: bash working_dir: "src" - script: | - . .evergreen/scripts/env.sh - .evergreen/run-mongodb-aws-test.sh assume-role + args: + - .evergreen/scripts/run-with-env.sh + - .evergreen/scripts/run-mongodb-aws-test.sh + - assume-role "run aws auth test with aws EC2 credentials": - - command: shell.exec + - command: subprocess.exec + params: + include_expansions_in_env: [ "TEST_DATA_LAKE", "TEST_INDEX_MANAGEMENT", "CRYPT_SHARED_LIB_PATH", "test_encryption", "test_encryption_pyopenssl", "test_crypt_shared", "test_pyopenssl", "test_loadbalancer", "test_serverless", "ORCHESTRATION_FILE" ] + binary: bash + working_dir: "src" + args: + - .evergreen/scripts/setup-tests.sh + - command: subprocess.exec type: test params: + include_expansions_in_env: ["DRIVERS_TOOLS", "skip_EC2_auth_test"] + binary: bash working_dir: "src" - shell: "bash" - script: | - if [ "${skip_EC2_auth_test}" = "true" ]; then - echo "This platform does not support the EC2 auth test, skipping..." - exit 0 - fi - . .evergreen/scripts/env.sh - .evergreen/run-mongodb-aws-test.sh ec2 + args: + - .evergreen/scripts/run-with-env.sh + - .evergreen/scripts/run-mongodb-aws-test.sh + - ec2 "run aws auth test with aws web identity credentials": - - command: shell.exec + - command: subprocess.exec + params: + include_expansions_in_env: [ "TEST_DATA_LAKE", "TEST_INDEX_MANAGEMENT", "CRYPT_SHARED_LIB_PATH", "test_encryption", "test_encryption_pyopenssl", "test_crypt_shared", "test_pyopenssl", "test_loadbalancer", "test_serverless", "ORCHESTRATION_FILE" ] + binary: bash + working_dir: "src" + args: + - .evergreen/scripts/setup-tests.sh + - # Test with and without AWS_ROLE_SESSION_NAME set. 
+ - command: subprocess.exec + type: test + params: + include_expansions_in_env: ["DRIVERS_TOOLS", "skip_EC2_auth_test"] + binary: bash + working_dir: "src" + args: + - .evergreen/scripts/run-with-env.sh + - .evergreen/scripts/run-mongodb-aws-test.sh + - web-identity + - command: subprocess.exec + type: test + params: + include_expansions_in_env: [ "DRIVERS_TOOLS", "skip_EC2_auth_test" ] + binary: bash + working_dir: "src" + env: + AWS_ROLE_SESSION_NAME: test + args: + - .evergreen/scripts/run-with-env.sh + - .evergreen/scripts/run-mongodb-aws-test.sh + - web-identity + + "run aws auth test with aws credentials as environment variables": + - command: subprocess.exec + params: + include_expansions_in_env: [ "TEST_DATA_LAKE", "TEST_INDEX_MANAGEMENT", "CRYPT_SHARED_LIB_PATH", "test_encryption", "test_encryption_pyopenssl", "test_crypt_shared", "test_pyopenssl", "test_loadbalancer", "test_serverless", "ORCHESTRATION_FILE" ] + binary: bash + working_dir: "src" + args: + - .evergreen/scripts/setup-tests.sh + - command: subprocess.exec type: test params: + include_expansions_in_env: ["DRIVERS_TOOLS", "skip_EC2_auth_test"] + binary: bash working_dir: "src" - shell: "bash" - script: | - if [ "${skip_EC2_auth_test}" = "true" ]; then - echo "This platform does not support the web identity auth test, skipping..." - exit 0 - fi - . .evergreen/scripts/env.sh - # Test with and without AWS_ROLE_SESSION_NAME set. - .evergreen/run-mongodb-aws-test.sh web-identity - AWS_ROLE_SESSION_NAME="test" \ - .evergreen/run-mongodb-aws-test.sh web-identity + args: + - .evergreen/scripts/run-with-env.sh + - .evergreen/scripts/run-mongodb-aws-test.sh + - env-creds + + "run aws auth test with aws credentials and session token as environment variables": + - command: subprocess.exec + params: + include_expansions_in_env: [ "TEST_DATA_LAKE", "TEST_INDEX_MANAGEMENT", "CRYPT_SHARED_LIB_PATH", "test_encryption", "test_encryption_pyopenssl", "test_crypt_shared", "test_pyopenssl", "test_loadbalancer", "test_serverless", "ORCHESTRATION_FILE" ] + binary: bash + working_dir: "src" + args: + - .evergreen/scripts/setup-tests.sh + - command: subprocess.exec + type: test + params: + include_expansions_in_env: ["DRIVERS_TOOLS", "skip_EC2_auth_test"] + binary: bash + working_dir: "src" + args: + - .evergreen/scripts/run-with-env.sh + - .evergreen/scripts/run-mongodb-aws-test.sh + - session-creds "run oidc auth test with test credentials": + - command: subprocess.exec + params: + include_expansions_in_env: [ "TEST_DATA_LAKE", "TEST_INDEX_MANAGEMENT", "CRYPT_SHARED_LIB_PATH", "test_encryption", "test_encryption_pyopenssl", "test_crypt_shared", "test_pyopenssl", "test_loadbalancer", "test_serverless", "ORCHESTRATION_FILE" ] + binary: bash + working_dir: "src" + args: + - .evergreen/scripts/setup-tests.sh - command: subprocess.exec type: test params: @@ -532,110 +492,69 @@ functions: args: - ${PROJECT_DIRECTORY}/.evergreen/run-mongodb-oidc-remote-test.sh - "run aws auth test with aws credentials as environment variables": - - command: shell.exec - type: test - params: - working_dir: "src" - shell: bash - script: | - . .evergreen/scripts/env.sh - .evergreen/run-mongodb-aws-test.sh env-creds - - "run aws auth test with aws credentials and session token as environment variables": - - command: shell.exec + "run aws ECS auth test": + - command: subprocess.exec type: test params: + binary: bash working_dir: "src" - shell: bash - script: | - . 
.evergreen/scripts/env.sh - .evergreen/run-mongodb-aws-test.sh session-creds + args: + - .evergreen/scripts/run-with-env.sh + - .evergreen/scripts/run-aws-ecs-auth-test.sh - "run aws ECS auth test": - - command: shell.exec - type: test + "cleanup": + - command: subprocess.exec params: - shell: "bash" + binary: bash working_dir: "src" - script: | - if [ "${skip_ECS_auth_test}" = "true" ]; then - echo "This platform does not support the ECS auth test, skipping..." - exit 0 - fi - . .evergreen/scripts/env.sh - set -ex - cd ${DRIVERS_TOOLS}/.evergreen/auth_aws - . ./activate-authawsvenv.sh - . aws_setup.sh ecs - export MONGODB_BINARIES="$MONGODB_BINARIES"; - export PROJECT_DIRECTORY="${PROJECT_DIRECTORY}"; - python aws_tester.py ecs - cd - + args: + - .evergreen/scripts/run-with-env.sh + - .evergreen/scripts/cleanup.sh - "cleanup": - - command: shell.exec + "teardown": + - command: subprocess.exec params: + binary: bash working_dir: "src" - script: | - . .evergreen/scripts/env.sh - if [ -f $DRIVERS_TOOLS/.evergreen/csfle/secrets-export.sh ]; then - . .evergreen/hatch.sh encryption:teardown - fi - rm -rf ${DRIVERS_TOOLS} || true - rm -f ./secrets-export.sh || true + args: + - ${DRIVERS_TOOLS}/.evergreen/teardown.sh "fix absolute paths": - - command: shell.exec + - command: subprocess.exec params: - script: | - set +x - . src/.evergreen/scripts/env.sh - for filename in $(find ${DRIVERS_TOOLS} -name \*.json); do - perl -p -i -e "s|ABSOLUTE_PATH_REPLACEMENT_TOKEN|${DRIVERS_TOOLS}|g" $filename - done + binary: bash + args: + - src/.evergreen/scripts/fix-absolute-paths.sh "windows fix": - - command: shell.exec - params: - script: | - set +x - . src/.evergreen/scripts/env.sh - for i in $(find ${DRIVERS_TOOLS}/.evergreen ${PROJECT_DIRECTORY}/.evergreen -name \*.sh); do - cat $i | tr -d '\r' > $i.new - mv $i.new $i - done - # Copy client certificate because symlinks do not work on Windows. - cp ${DRIVERS_TOOLS}/.evergreen/x509gen/client.pem $MONGO_ORCHESTRATION_HOME/lib/client.pem + - command: subprocess.exec + params: + binary: bash + args: + - src/.evergreen/scripts/windows-fix.sh "make files executable": - - command: shell.exec + - command: subprocess.exec params: - script: | - set +x - . src/.evergreen/scripts/env.sh - for i in $(find ${DRIVERS_TOOLS}/.evergreen ${PROJECT_DIRECTORY}/.evergreen -name \*.sh); do - chmod +x $i - done + binary: bash + args: + - src/.evergreen/scripts/make-files-executable.sh "init test-results": - - command: shell.exec + - command: subprocess.exec params: - script: | - set +x - . src/.evergreen/scripts/env.sh - echo '{"results": [{ "status": "FAIL", "test_file": "Build", "log_raw": "No test-results.json found was created" } ]}' > ${PROJECT_DIRECTORY}/test-results.json + binary: bash + args: + - src/.evergreen/scripts/init-test-results.sh "install dependencies": - - command: shell.exec + - command: subprocess.exec params: + binary: bash working_dir: "src" - script: | - . .evergreen/scripts/env.sh - set -o xtrace - file="${PROJECT_DIRECTORY}/.evergreen/install-dependencies.sh" - # Don't use ${file} syntax here because evergreen treats it as an empty expansion. 
- [ -f "$file" ] && bash $file || echo "$file not available, skipping" + args: + - .evergreen/scripts/run-with-env.sh + - .evergreen/scripts/install-dependencies.sh "assume ec2 role": - command: ec2.assume_role @@ -657,18 +576,22 @@ functions: file: atlas-expansion.yml "run-ocsp-test": - - command: shell.exec + - command: subprocess.exec + params: + include_expansions_in_env: [ "TEST_DATA_LAKE", "TEST_INDEX_MANAGEMENT", "CRYPT_SHARED_LIB_PATH", "test_encryption", "test_encryption_pyopenssl", "test_crypt_shared", "test_pyopenssl", "test_loadbalancer", "test_serverless", "ORCHESTRATION_FILE" ] + binary: bash + working_dir: "src" + args: + - .evergreen/scripts/setup-tests.sh + - command: subprocess.exec type: test params: + include_expansions_in_env: ["OCSP_ALGORITHM", "OCSP_TLS_SHOULD_SUCCEED", "PYTHON_BINARY"] + binary: bash working_dir: "src" - script: | - . .evergreen/scripts/env.sh - TEST_OCSP=1 \ - PYTHON_BINARY=${PYTHON_BINARY} \ - CA_FILE="${DRIVERS_TOOLS}/.evergreen/ocsp/${OCSP_ALGORITHM}/ca.pem" \ - OCSP_TLS_SHOULD_SUCCEED="${OCSP_TLS_SHOULD_SUCCEED}" \ - bash ${PROJECT_DIRECTORY}/.evergreen/hatch.sh test:test-eg - bash ${DRIVERS_TOOLS}/.evergreen/ocsp/teardown.sh + args: + - .evergreen/scripts/run-with-env.sh + - .evergreen/scripts/run-ocsp-test.sh "run-ocsp-server": - command: subprocess.exec @@ -680,42 +603,38 @@ functions: - ${DRIVERS_TOOLS}/.evergreen/ocsp/setup.sh "run load-balancer": - - command: shell.exec + - command: subprocess.exec params: - script: | - DRIVERS_TOOLS=${DRIVERS_TOOLS} MONGODB_URI=${MONGODB_URI} bash ${DRIVERS_TOOLS}/.evergreen/run-load-balancer.sh start + binary: bash + include_expansions_in_env: ["MONGODB_URI"] + args: + - src/.evergreen/scripts/run-with-env.sh + - src/.evergreen/scripts/run-load-balancer.sh - command: expansions.update params: file: lb-expansion.yml "stop load-balancer": - - command: shell.exec + - command: subprocess.exec params: - script: | - cd ${DRIVERS_TOOLS}/.evergreen - DRIVERS_TOOLS=${DRIVERS_TOOLS} bash ${DRIVERS_TOOLS}/.evergreen/run-load-balancer.sh stop + binary: bash + args: + - src/.evergreen/scripts/stop-load-balancer.sh "teardown_docker": - - command: shell.exec + - command: subprocess.exec params: - script: | - # Remove all Docker images - DOCKER=$(command -v docker) || true - if [ -n "$DOCKER" ]; then - docker rmi -f $(docker images -a -q) &> /dev/null || true - fi + binary: bash + args: + - src/.evergreen/scripts/teardown-docker.sh "teardown_aws": - - command: shell.exec + - command: subprocess.exec params: - shell: "bash" - script: | - . src/.evergreen/scripts/env.sh - cd "${DRIVERS_TOOLS}/.evergreen/auth_aws" - if [ -f "./aws_e2e_setup.json" ]; then - . ./activate-authawsvenv.sh - python ./lib/aws_assign_instance_profile.py - fi + binary: bash + args: + - src/.evergreen/scripts/run-with-env.sh + - src/.evergreen/scripts/teardown-aws.sh "teardown atlas": - command: subprocess.exec @@ -725,13 +644,14 @@ functions: - ${DRIVERS_TOOLS}/.evergreen/atlas/teardown-atlas-cluster.sh "run perf tests": - - command: shell.exec + - command: subprocess.exec type: test params: working_dir: "src" - script: | - . 
.evergreen/scripts/env.sh - PROJECT_DIRECTORY=${PROJECT_DIRECTORY} bash ${PROJECT_DIRECTORY}/.evergreen/run-perf-tests.sh + binary: bash + args: + - .evergreen/scripts/run-with-env.sh + - .evergreen/scripts/run-perf-tests.sh "attach benchmark test results": - command: attach.results @@ -756,6 +676,7 @@ pre: post: # Disabled, causing timeouts # - func: "upload working dir" + - func: "teardown" - func: "upload coverage" - func: "upload mo artifacts" - func: "upload test results" @@ -798,13 +719,13 @@ task_groups: - func: make files executable - command: subprocess.exec params: - binary: "bash" + binary: bash args: - ${DRIVERS_TOOLS}/.evergreen/csfle/gcpkms/create-and-setup-instance.sh teardown_task: - command: subprocess.exec params: - binary: "bash" + binary: bash args: - ${DRIVERS_TOOLS}/.evergreen/csfle/gcpkms/delete-instance.sh - func: "upload test results" @@ -966,31 +887,12 @@ tasks: # Throw it here, and execute this task on all buildvariants - name: getdata commands: - - command: shell.exec + - command: subprocess.exec + binary: bash type: test params: - script: | - set -o xtrace - . ${DRIVERS_TOOLS}/.evergreen/download-mongodb.sh || true - get_distro || true - echo $DISTRO - echo $MARCH - echo $OS - uname -a || true - ls /etc/*release* || true - cc --version || true - gcc --version || true - clang --version || true - gcov --version || true - lcov --version || true - llvm-cov --version || true - echo $PATH - ls -la /usr/local/Cellar/llvm/*/bin/ || true - ls -la /usr/local/Cellar/ || true - scan-build --version || true - genhtml --version || true - valgrind --version || true - + args: + - src/.evergreen/scripts/run-getdata.sh # Standard test tasks {{{ - name: "mockupdb" @@ -1647,7 +1549,7 @@ tasks: type: setup params: working_dir: "src" - binary: "bash" + binary: bash include_expansions_in_env: ["DRIVERS_TOOLS"] args: - .evergreen/run-gcpkms-test.sh @@ -1660,17 +1562,14 @@ tasks: vars: VERSION: "latest" TOPOLOGY: "server" - - command: shell.exec + - command: subprocess.exec type: test params: + include_expansions_in_env: ["PYTHON_BINARY"] working_dir: "src" - shell: "bash" - script: | - . .evergreen/scripts/env.sh - export PYTHON_BINARY=/opt/mongodbtoolchain/v4/bin/python3 - export LIBMONGOCRYPT_URL=https://s3.amazonaws.com/mciuploads/libmongocrypt/debian11/master/latest/libmongocrypt.tar.gz - SKIP_SERVERS=1 bash ./.evergreen/setup-encryption.sh - SUCCESS=false TEST_FLE_GCP_AUTO=1 ./.evergreen/hatch.sh test:test-eg + binary: "bash" + args: + - .evergreen/scripts/run-gcpkms-fail-test.sh - name: testazurekms-task commands: @@ -1736,18 +1635,15 @@ tasks: - name: "check-import-time" tags: ["pr"] commands: - - command: shell.exec + - command: subprocess.exec type: test params: - shell: "bash" + binary: bash working_dir: src - script: | - . .evergreen/scripts/env.sh - set -x - export BASE_SHA=${revision} - export HEAD_SHA=${github_commit} - bash .evergreen/run-import-time-test.sh - + args: + - .evergreen/scripts/check-import-time.sh + - ${revision} + - ${github_commit} - name: "backport-pr" allowed_requesters: ["commit"] commands: diff --git a/.evergreen/hatch.sh b/.evergreen/hatch.sh index 45d5113cd6..98cd9ed734 100644 --- a/.evergreen/hatch.sh +++ b/.evergreen/hatch.sh @@ -29,7 +29,7 @@ else # Set up virtualenv before installing hatch # Ensure hatch does not write to user or global locations. 
touch hatch_config.toml HATCH_CONFIG=$(pwd)/hatch_config.toml - if [ "Windows_NT" = "$OS" ]; then # Magic variable in cygwin + if [ "Windows_NT" = "${OS:-}" ]; then # Magic variable in cygwin HATCH_CONFIG=$(cygpath -m "$HATCH_CONFIG") fi export HATCH_CONFIG diff --git a/.evergreen/run-tests.sh b/.evergreen/run-tests.sh index 1e03e27147..9716c1fc79 100755 --- a/.evergreen/run-tests.sh +++ b/.evergreen/run-tests.sh @@ -53,18 +53,18 @@ if [ -z "${NO_EXT:-}" ]; then fi if [ "$AUTH" != "noauth" ]; then - if [ ! -z "$TEST_DATA_LAKE" ]; then + if [ -n "$TEST_DATA_LAKE" ]; then export DB_USER="mhuser" export DB_PASSWORD="pencil" - elif [ ! -z "$TEST_SERVERLESS" ]; then - source ${DRIVERS_TOOLS}/.evergreen/serverless/secrets-export.sh + elif [ -n "$TEST_SERVERLESS" ]; then + source "${DRIVERS_TOOLS}"/.evergreen/serverless/secrets-export.sh export DB_USER=$SERVERLESS_ATLAS_USER export DB_PASSWORD=$SERVERLESS_ATLAS_PASSWORD export MONGODB_URI="$SERVERLESS_URI" echo "MONGODB_URI=$MONGODB_URI" export SINGLE_MONGOS_LB_URI=$MONGODB_URI export MULTI_MONGOS_LB_URI=$MONGODB_URI - elif [ ! -z "$TEST_AUTH_OIDC" ]; then + elif [ -n "$TEST_AUTH_OIDC" ]; then export DB_USER=$OIDC_ADMIN_USER export DB_PASSWORD=$OIDC_ADMIN_PWD export DB_IP="$MONGODB_URI" diff --git a/.evergreen/scripts/archive-mongodb-logs.sh b/.evergreen/scripts/archive-mongodb-logs.sh new file mode 100644 index 0000000000..70a337cd11 --- /dev/null +++ b/.evergreen/scripts/archive-mongodb-logs.sh @@ -0,0 +1,8 @@ +#!/bin/bash + +set -o xtrace +mkdir out_dir +# shellcheck disable=SC2156 +find "$MONGO_ORCHESTRATION_HOME" -name \*.log -exec sh -c 'x="{}"; mv $x $PWD/out_dir/$(basename $(dirname $x))_$(basename $x)' \; +tar zcvf mongodb-logs.tar.gz -C out_dir/ . +rm -rf out_dir diff --git a/.evergreen/scripts/bootstrap-mongo-orchestration.sh b/.evergreen/scripts/bootstrap-mongo-orchestration.sh new file mode 100644 index 0000000000..1d2b145de8 --- /dev/null +++ b/.evergreen/scripts/bootstrap-mongo-orchestration.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +set -o xtrace + +# Enable core dumps if enabled on the machine +# Copied from https://github.com/mongodb/mongo/blob/master/etc/evergreen.yml +if [ -f /proc/self/coredump_filter ]; then + # Set the shell process (and its children processes) to dump ELF headers (bit 4), + # anonymous shared mappings (bit 1), and anonymous private mappings (bit 0). + echo 0x13 >/proc/self/coredump_filter + + if [ -f /sbin/sysctl ]; then + # Check that the core pattern is set explicitly on our distro image instead + # of being the OS's default value. This ensures that coredump names are consistent + # across distros and can be picked up by Evergreen. 
+ core_pattern=$(/sbin/sysctl -n "kernel.core_pattern") + if [ "$core_pattern" = "dump_%e.%p.core" ]; then + echo "Enabling coredumps" + ulimit -c unlimited + fi + fi +fi + +if [ "$(uname -s)" = "Darwin" ]; then + core_pattern_mac=$(/usr/sbin/sysctl -n "kern.corefile") + if [ "$core_pattern_mac" = "dump_%N.%P.core" ]; then + echo "Enabling coredumps" + ulimit -c unlimited + fi +fi + +if [ -n "${skip_crypt_shared}" ]; then + export SKIP_CRYPT_SHARED=1 +fi + +MONGODB_VERSION=${VERSION} \ + TOPOLOGY=${TOPOLOGY} \ + AUTH=${AUTH:-noauth} \ + SSL=${SSL:-nossl} \ + STORAGE_ENGINE=${STORAGE_ENGINE:-} \ + DISABLE_TEST_COMMANDS=${DISABLE_TEST_COMMANDS:-} \ + ORCHESTRATION_FILE=${ORCHESTRATION_FILE:-} \ + REQUIRE_API_VERSION=${REQUIRE_API_VERSION:-} \ + LOAD_BALANCER=${LOAD_BALANCER:-} \ + bash ${DRIVERS_TOOLS}/.evergreen/run-orchestration.sh +# run-orchestration generates expansion file with the MONGODB_URI for the cluster diff --git a/.evergreen/scripts/check-import-time.sh b/.evergreen/scripts/check-import-time.sh new file mode 100644 index 0000000000..cdd2025d59 --- /dev/null +++ b/.evergreen/scripts/check-import-time.sh @@ -0,0 +1,7 @@ +#!/bin/bash + +. .evergreen/scripts/env.sh +set -x +export BASE_SHA="$1" +export HEAD_SHA="$2" +bash .evergreen/run-import-time-test.sh diff --git a/.evergreen/scripts/cleanup.sh b/.evergreen/scripts/cleanup.sh new file mode 100644 index 0000000000..9e583e4f1e --- /dev/null +++ b/.evergreen/scripts/cleanup.sh @@ -0,0 +1,7 @@ +#!/bin/bash + +if [ -f "$DRIVERS_TOOLS"/.evergreen/csfle/secrets-export.sh ]; then + . .evergreen/hatch.sh encryption:teardown +fi +rm -rf "${DRIVERS_TOOLS}" || true +rm -f ./secrets-export.sh || true diff --git a/.evergreen/scripts/configure-env.sh b/.evergreen/scripts/configure-env.sh index 98d400037c..3c0a0436de 100644 --- a/.evergreen/scripts/configure-env.sh +++ b/.evergreen/scripts/configure-env.sh @@ -1,4 +1,4 @@ -#!/bin/bash -ex +#!/bin/bash -eux # Get the current unique version of this checkout # shellcheck disable=SC2154 @@ -29,7 +29,7 @@ fi export MONGO_ORCHESTRATION_HOME="$DRIVERS_TOOLS/.evergreen/orchestration" export MONGODB_BINARIES="$DRIVERS_TOOLS/mongodb/bin" -cat < $SCRIPT_DIR/env.sh +cat < "$SCRIPT_DIR"/env.sh set -o errexit export PROJECT_DIRECTORY="$PROJECT_DIRECTORY" export CURRENT_VERSION="$CURRENT_VERSION" @@ -38,6 +38,21 @@ export DRIVERS_TOOLS="$DRIVERS_TOOLS" export MONGO_ORCHESTRATION_HOME="$MONGO_ORCHESTRATION_HOME" export MONGODB_BINARIES="$MONGODB_BINARIES" export PROJECT_DIRECTORY="$PROJECT_DIRECTORY" +export SETDEFAULTENCODING="${SETDEFAULTENCODING:-}" +export SKIP_CSOT_TESTS="${SKIP_CSOT_TESTS:-}" +export MONGODB_STARTED="${MONGODB_STARTED:-}" +export DISABLE_TEST_COMMANDS="${DISABLE_TEST_COMMANDS:-}" +export GREEN_FRAMEWORK="${GREEN_FRAMEWORK:-}" +export NO_EXT="${NO_EXT:-}" +export COVERAGE="${COVERAGE:-}" +export COMPRESSORS="${COMPRESSORS:-}" +export MONGODB_API_VERSION="${MONGODB_API_VERSION:-}" +export SKIP_HATCH="${SKIP_HATCH:-}" +export skip_crypt_shared="${skip_crypt_shared:-}" +export STORAGE_ENGINE="${STORAGE_ENGINE:-}" +export REQUIRE_API_VERSION="${REQUIRE_API_VERSION:-}" +export skip_web_identity_auth_test="${skip_web_identity_auth_test:-}" +export skip_ECS_auth_test="${skip_ECS_auth_test:-}" export TMPDIR="$MONGO_ORCHESTRATION_HOME/db" export PATH="$MONGODB_BINARIES:$PATH" diff --git a/.evergreen/scripts/download-and-merge-coverage.sh b/.evergreen/scripts/download-and-merge-coverage.sh new file mode 100644 index 0000000000..808bb957ef --- /dev/null +++ 
b/.evergreen/scripts/download-and-merge-coverage.sh @@ -0,0 +1,4 @@ +#!/bin/bash + +# Download all the task coverage files. +aws s3 cp --recursive s3://"$1"/coverage/"$2"/"$3"/coverage/ coverage/ diff --git a/.evergreen/scripts/fix-absolute-paths.sh b/.evergreen/scripts/fix-absolute-paths.sh new file mode 100644 index 0000000000..eb9433c673 --- /dev/null +++ b/.evergreen/scripts/fix-absolute-paths.sh @@ -0,0 +1,8 @@ +#!/bin/bash + +set +x +. src/.evergreen/scripts/env.sh +# shellcheck disable=SC2044 +for filename in $(find $DRIVERS_TOOLS -name \*.json); do + perl -p -i -e "s|ABSOLUTE_PATH_REPLACEMENT_TOKEN|$DRIVERS_TOOLS|g" $filename +done diff --git a/.evergreen/scripts/init-test-results.sh b/.evergreen/scripts/init-test-results.sh new file mode 100644 index 0000000000..666ac60620 --- /dev/null +++ b/.evergreen/scripts/init-test-results.sh @@ -0,0 +1,5 @@ +#!/bin/bash + +set +x +. src/.evergreen/scripts/env.sh +echo '{"results": [{ "status": "FAIL", "test_file": "Build", "log_raw": "No test-results.json found was created" } ]}' >$PROJECT_DIRECTORY/test-results.json diff --git a/.evergreen/scripts/install-dependencies.sh b/.evergreen/scripts/install-dependencies.sh new file mode 100644 index 0000000000..ebcc8f3069 --- /dev/null +++ b/.evergreen/scripts/install-dependencies.sh @@ -0,0 +1,6 @@ +#!/bin/bash + +set -o xtrace +file="$PROJECT_DIRECTORY/.evergreen/install-dependencies.sh" +# Don't use ${file} syntax here because evergreen treats it as an empty expansion. +[ -f "$file" ] && bash "$file" || echo "$file not available, skipping" diff --git a/.evergreen/scripts/make-files-executable.sh b/.evergreen/scripts/make-files-executable.sh new file mode 100644 index 0000000000..806be7c599 --- /dev/null +++ b/.evergreen/scripts/make-files-executable.sh @@ -0,0 +1,8 @@ +#!/bin/bash + +set +x +. src/.evergreen/scripts/env.sh +# shellcheck disable=SC2044 +for i in $(find "$DRIVERS_TOOLS"/.evergreen "$PROJECT_DIRECTORY"/.evergreen -name \*.sh); do + chmod +x "$i" +done diff --git a/.evergreen/scripts/prepare-resources.sh b/.evergreen/scripts/prepare-resources.sh new file mode 100644 index 0000000000..33394b55ff --- /dev/null +++ b/.evergreen/scripts/prepare-resources.sh @@ -0,0 +1,12 @@ +#!/bin/bash + +. src/.evergreen/scripts/env.sh +set -o xtrace +rm -rf $DRIVERS_TOOLS +if [ "$PROJECT" = "drivers-tools" ]; then + # If this was a patch build, doing a fresh clone would not actually test the patch + cp -R $PROJECT_DIRECTORY/ $DRIVERS_TOOLS +else + git clone https://github.com/mongodb-labs/drivers-evergreen-tools.git $DRIVERS_TOOLS +fi +echo "{ \"releases\": { \"default\": \"$MONGODB_BINARIES\" }}" >$MONGO_ORCHESTRATION_HOME/orchestration.config diff --git a/.evergreen/scripts/run-atlas-tests.sh b/.evergreen/scripts/run-atlas-tests.sh new file mode 100644 index 0000000000..98a19f047f --- /dev/null +++ b/.evergreen/scripts/run-atlas-tests.sh @@ -0,0 +1,7 @@ +#!/bin/bash + +# Disable xtrace for security reasons (just in case it was accidentally set). 
+set +x +set -o errexit +bash "${DRIVERS_TOOLS}"/.evergreen/auth_aws/setup_secrets.sh drivers/atlas_connect +TEST_ATLAS=1 bash "${PROJECT_DIRECTORY}"/.evergreen/hatch.sh test:test-eg diff --git a/.evergreen/scripts/run-aws-ecs-auth-test.sh b/.evergreen/scripts/run-aws-ecs-auth-test.sh new file mode 100644 index 0000000000..787e0a710b --- /dev/null +++ b/.evergreen/scripts/run-aws-ecs-auth-test.sh @@ -0,0 +1,15 @@ +#!/bin/bash + +# shellcheck disable=SC2154 +if [ "${skip_ECS_auth_test}" = "true" ]; then + echo "This platform does not support the ECS auth test, skipping..." + exit 0 +fi +set -ex +cd "$DRIVERS_TOOLS"/.evergreen/auth_aws +. ./activate-authawsvenv.sh +. aws_setup.sh ecs +export MONGODB_BINARIES="$MONGODB_BINARIES" +export PROJECT_DIRECTORY="$PROJECT_DIRECTORY" +python aws_tester.py ecs +cd - diff --git a/.evergreen/scripts/run-doctests.sh b/.evergreen/scripts/run-doctests.sh new file mode 100644 index 0000000000..f7215ad347 --- /dev/null +++ b/.evergreen/scripts/run-doctests.sh @@ -0,0 +1,4 @@ +#!/bin/bash + +set -o xtrace +PYTHON_BINARY=${PYTHON_BINARY} bash "${PROJECT_DIRECTORY}"/.evergreen/hatch.sh doctest:test diff --git a/.evergreen/scripts/run-enterprise-auth-tests.sh b/.evergreen/scripts/run-enterprise-auth-tests.sh new file mode 100644 index 0000000000..31371ead45 --- /dev/null +++ b/.evergreen/scripts/run-enterprise-auth-tests.sh @@ -0,0 +1,6 @@ +#!/bin/bash + +# Disable xtrace for security reasons (just in case it was accidentally set). +set +x +bash "${DRIVERS_TOOLS}"/.evergreen/auth_aws/setup_secrets.sh drivers/enterprise_auth +TEST_ENTERPRISE_AUTH=1 AUTH=auth bash "${PROJECT_DIRECTORY}"/.evergreen/hatch.sh test:test-eg diff --git a/.evergreen/scripts/run-gcpkms-fail-test.sh b/.evergreen/scripts/run-gcpkms-fail-test.sh new file mode 100644 index 0000000000..dd9d522c8a --- /dev/null +++ b/.evergreen/scripts/run-gcpkms-fail-test.sh @@ -0,0 +1,7 @@ +#!/bin/bash + +. .evergreen/scripts/env.sh +export PYTHON_BINARY=/opt/mongodbtoolchain/v4/bin/python3 +export LIBMONGOCRYPT_URL=https://s3.amazonaws.com/mciuploads/libmongocrypt/debian11/master/latest/libmongocrypt.tar.gz +SKIP_SERVERS=1 bash ./.evergreen/setup-encryption.sh +SUCCESS=false TEST_FLE_GCP_AUTO=1 ./.evergreen/hatch.sh test:test-eg diff --git a/.evergreen/scripts/run-getdata.sh b/.evergreen/scripts/run-getdata.sh new file mode 100644 index 0000000000..b2d6ecb476 --- /dev/null +++ b/.evergreen/scripts/run-getdata.sh @@ -0,0 +1,22 @@ +#!/bin/bash + +set -o xtrace +. 
${DRIVERS_TOOLS}/.evergreen/download-mongodb.sh || true +get_distro || true +echo $DISTRO +echo $MARCH +echo $OS +uname -a || true +ls /etc/*release* || true +cc --version || true +gcc --version || true +clang --version || true +gcov --version || true +lcov --version || true +llvm-cov --version || true +echo $PATH +ls -la /usr/local/Cellar/llvm/*/bin/ || true +ls -la /usr/local/Cellar/ || true +scan-build --version || true +genhtml --version || true +valgrind --version || true diff --git a/.evergreen/scripts/run-load-balancer.sh b/.evergreen/scripts/run-load-balancer.sh new file mode 100644 index 0000000000..7d431777e5 --- /dev/null +++ b/.evergreen/scripts/run-load-balancer.sh @@ -0,0 +1,3 @@ +#!/bin/bash + +MONGODB_URI=${MONGODB_URI} bash "${DRIVERS_TOOLS}"/.evergreen/run-load-balancer.sh start diff --git a/.evergreen/scripts/run-mockupdb-tests.sh b/.evergreen/scripts/run-mockupdb-tests.sh new file mode 100644 index 0000000000..8825a0237d --- /dev/null +++ b/.evergreen/scripts/run-mockupdb-tests.sh @@ -0,0 +1,5 @@ +#!/bin/bash + +set -o xtrace +export PYTHON_BINARY=${PYTHON_BINARY} +bash "${PROJECT_DIRECTORY}"/.evergreen/hatch.sh test:test-mockupdb diff --git a/.evergreen/run-mod-wsgi-tests.sh b/.evergreen/scripts/run-mod-wsgi-tests.sh similarity index 97% rename from .evergreen/run-mod-wsgi-tests.sh rename to .evergreen/scripts/run-mod-wsgi-tests.sh index e1f5238110..607458b8c6 100644 --- a/.evergreen/run-mod-wsgi-tests.sh +++ b/.evergreen/scripts/run-mod-wsgi-tests.sh @@ -28,7 +28,7 @@ export MOD_WSGI_SO=/opt/python/mod_wsgi/python_version/$PYTHON_VERSION/mod_wsgi_ export PYTHONHOME=/opt/python/$PYTHON_VERSION # If MOD_WSGI_EMBEDDED is set use the default embedded mode behavior instead # of daemon mode (WSGIDaemonProcess). -if [ -n "$MOD_WSGI_EMBEDDED" ]; then +if [ -n "${MOD_WSGI_EMBEDDED:-}" ]; then export MOD_WSGI_CONF=mod_wsgi_test_embedded.conf else export MOD_WSGI_CONF=mod_wsgi_test.conf diff --git a/.evergreen/run-mongodb-aws-test.sh b/.evergreen/scripts/run-mongodb-aws-test.sh similarity index 67% rename from .evergreen/run-mongodb-aws-test.sh rename to .evergreen/scripts/run-mongodb-aws-test.sh index c4051bb34a..ec20bfd06b 100755 --- a/.evergreen/run-mongodb-aws-test.sh +++ b/.evergreen/scripts/run-mongodb-aws-test.sh @@ -13,10 +13,16 @@ set -o errexit # Exit the script with error if any of the commands fail # mechanism. # PYTHON_BINARY The Python version to use. -echo "Running MONGODB-AWS authentication tests" +# shellcheck disable=SC2154 +if [ "${skip_EC2_auth_test:-}" = "true" ] && { [ "$1" = "ec2" ] || [ "$1" = "web-identity" ]; }; then + echo "This platform does not support the EC2 auth test, skipping..." + exit 0 +fi + +echo "Running MONGODB-AWS authentication tests for $1" # Handle credentials and environment setup. -. $DRIVERS_TOOLS/.evergreen/auth_aws/aws_setup.sh $1 +. 
"$DRIVERS_TOOLS"/.evergreen/auth_aws/aws_setup.sh "$1" # show test output set -x diff --git a/.evergreen/scripts/run-ocsp-test.sh b/.evergreen/scripts/run-ocsp-test.sh new file mode 100644 index 0000000000..3c6d3b2b3b --- /dev/null +++ b/.evergreen/scripts/run-ocsp-test.sh @@ -0,0 +1,8 @@ +#!/bin/bash + +TEST_OCSP=1 \ +PYTHON_BINARY="${PYTHON_BINARY}" \ +CA_FILE="${DRIVERS_TOOLS}/.evergreen/ocsp/${OCSP_ALGORITHM}/ca.pem" \ +OCSP_TLS_SHOULD_SUCCEED="${OCSP_TLS_SHOULD_SUCCEED}" \ +bash "${PROJECT_DIRECTORY}"/.evergreen/hatch.sh test:test-eg +bash "${DRIVERS_TOOLS}"/.evergreen/ocsp/teardown.sh diff --git a/.evergreen/scripts/run-perf-tests.sh b/.evergreen/scripts/run-perf-tests.sh new file mode 100644 index 0000000000..69a369fee1 --- /dev/null +++ b/.evergreen/scripts/run-perf-tests.sh @@ -0,0 +1,4 @@ +#!/bin/bash + +PROJECT_DIRECTORY=${PROJECT_DIRECTORY} +bash "${PROJECT_DIRECTORY}"/.evergreen/run-perf-tests.sh diff --git a/.evergreen/scripts/run-tests.sh b/.evergreen/scripts/run-tests.sh new file mode 100644 index 0000000000..495db83e70 --- /dev/null +++ b/.evergreen/scripts/run-tests.sh @@ -0,0 +1,55 @@ +#!/bin/bash + +# Disable xtrace +set +x +if [ -n "${MONGODB_STARTED}" ]; then + export PYMONGO_MUST_CONNECT=true +fi +if [ -n "${DISABLE_TEST_COMMANDS}" ]; then + export PYMONGO_DISABLE_TEST_COMMANDS=1 +fi +if [ -n "${test_encryption}" ]; then + # Disable xtrace (just in case it was accidentally set). + set +x + bash "${DRIVERS_TOOLS}"/.evergreen/csfle/await-servers.sh + export TEST_ENCRYPTION=1 + if [ -n "${test_encryption_pyopenssl}" ]; then + export TEST_ENCRYPTION_PYOPENSSL=1 + fi +fi +if [ -n "${test_crypt_shared}" ]; then + export TEST_CRYPT_SHARED=1 + export CRYPT_SHARED_LIB_PATH=${CRYPT_SHARED_LIB_PATH} +fi +if [ -n "${test_pyopenssl}" ]; then + export TEST_PYOPENSSL=1 +fi +if [ -n "${SETDEFAULTENCODING}" ]; then + export SETDEFAULTENCODING="${SETDEFAULTENCODING}" +fi +if [ -n "${test_loadbalancer}" ]; then + export TEST_LOADBALANCER=1 + export SINGLE_MONGOS_LB_URI="${SINGLE_MONGOS_LB_URI}" + export MULTI_MONGOS_LB_URI="${MULTI_MONGOS_LB_URI}" +fi +if [ -n "${test_serverless}" ]; then + export TEST_SERVERLESS=1 +fi +if [ -n "${TEST_INDEX_MANAGEMENT:-}" ]; then + export TEST_INDEX_MANAGEMENT=1 +fi +if [ -n "${SKIP_CSOT_TESTS}" ]; then + export SKIP_CSOT_TESTS=1 +fi +GREEN_FRAMEWORK=${GREEN_FRAMEWORK} \ + PYTHON_BINARY=${PYTHON_BINARY} \ + NO_EXT=${NO_EXT} \ + COVERAGE=${COVERAGE} \ + COMPRESSORS=${COMPRESSORS} \ + AUTH=${AUTH} \ + SSL=${SSL} \ + TEST_DATA_LAKE=${TEST_DATA_LAKE:-} \ + TEST_SUITES=${TEST_SUITES:-} \ + MONGODB_API_VERSION=${MONGODB_API_VERSION} \ + SKIP_HATCH=${SKIP_HATCH} \ + bash "${PROJECT_DIRECTORY}"/.evergreen/hatch.sh test:test-eg diff --git a/.evergreen/scripts/run-with-env.sh b/.evergreen/scripts/run-with-env.sh new file mode 100644 index 0000000000..2fd073605d --- /dev/null +++ b/.evergreen/scripts/run-with-env.sh @@ -0,0 +1,21 @@ +#!/bin/bash -eu + +# Example use: bash run-with-env.sh run-tests.sh {args...} + +# Parameter expansion to get just the current directory's name +if [ "${PWD##*/}" == "src" ]; then + . .evergreen/scripts/env.sh + if [ -f ".evergreen/scripts/test-env.sh" ]; then + . .evergreen/scripts/test-env.sh + fi +else + . src/.evergreen/scripts/env.sh + if [ -f "src/.evergreen/scripts/test-env.sh" ]; then + . src/.evergreen/scripts/test-env.sh + fi +fi + +set -eu + +# shellcheck source=/dev/null +. 
"$@" diff --git a/.evergreen/scripts/setup-encryption.sh b/.evergreen/scripts/setup-encryption.sh new file mode 100644 index 0000000000..2f167cd20b --- /dev/null +++ b/.evergreen/scripts/setup-encryption.sh @@ -0,0 +1,5 @@ +#!/bin/bash + +if [ -n "${test_encryption}" ]; then + ./.evergreen/hatch.sh encryption:setup +fi diff --git a/.evergreen/scripts/setup-tests.sh b/.evergreen/scripts/setup-tests.sh new file mode 100644 index 0000000000..65462b2a68 --- /dev/null +++ b/.evergreen/scripts/setup-tests.sh @@ -0,0 +1,27 @@ +#!/bin/bash -eux + +PROJECT_DIRECTORY="$(pwd)" +SCRIPT_DIR="$PROJECT_DIRECTORY/.evergreen/scripts" + +if [ -f "$SCRIPT_DIR/test-env.sh" ]; then + echo "Reading $SCRIPT_DIR/test-env.sh file" + . "$SCRIPT_DIR/test-env.sh" + exit 0 +fi + +cat < "$SCRIPT_DIR"/test-env.sh +export test_encryption="${test_encryption:-}" +export test_encryption_pyopenssl="${test_encryption_pyopenssl:-}" +export test_crypt_shared="${test_crypt_shared:-}" +export test_pyopenssl="${test_pyopenssl:-}" +export test_loadbalancer="${test_loadbalancer:-}" +export test_serverless="${test_serverless:-}" +export TEST_INDEX_MANAGEMENT="${TEST_INDEX_MANAGEMENT:-}" +export TEST_DATA_LAKE="${TEST_DATA_LAKE:-}" +export ORCHESTRATION_FILE="${ORCHESTRATION_FILE:-}" +export AUTH="${AUTH:-noauth}" +export SSL="${SSL:-nossl}" +export PYTHON_BINARY="${PYTHON_BINARY:-}" +EOT + +chmod +x "$SCRIPT_DIR"/test-env.sh diff --git a/.evergreen/scripts/stop-load-balancer.sh b/.evergreen/scripts/stop-load-balancer.sh new file mode 100644 index 0000000000..2d3c5366ec --- /dev/null +++ b/.evergreen/scripts/stop-load-balancer.sh @@ -0,0 +1,5 @@ +#!/bin/bash + +cd "${DRIVERS_TOOLS}"/.evergreen || exit +DRIVERS_TOOLS=${DRIVERS_TOOLS} +bash "${DRIVERS_TOOLS}"/.evergreen/run-load-balancer.sh stop diff --git a/.evergreen/scripts/teardown-aws.sh b/.evergreen/scripts/teardown-aws.sh new file mode 100644 index 0000000000..634d1e5724 --- /dev/null +++ b/.evergreen/scripts/teardown-aws.sh @@ -0,0 +1,7 @@ +#!/bin/bash + +cd "${DRIVERS_TOOLS}/.evergreen/auth_aws" || exit +if [ -f "./aws_e2e_setup.json" ]; then + . ./activate-authawsvenv.sh + python ./lib/aws_assign_instance_profile.py +fi diff --git a/.evergreen/scripts/teardown-docker.sh b/.evergreen/scripts/teardown-docker.sh new file mode 100644 index 0000000000..733779d058 --- /dev/null +++ b/.evergreen/scripts/teardown-docker.sh @@ -0,0 +1,7 @@ +#!/bin/bash + +# Remove all Docker images +DOCKER=$(command -v docker) || true +if [ -n "$DOCKER" ]; then + docker rmi -f "$(docker images -a -q)" &> /dev/null || true +fi diff --git a/.evergreen/scripts/upload-coverage-report.sh b/.evergreen/scripts/upload-coverage-report.sh new file mode 100644 index 0000000000..71a2a80bb8 --- /dev/null +++ b/.evergreen/scripts/upload-coverage-report.sh @@ -0,0 +1,3 @@ +#!/bin/bash + +aws s3 cp htmlcov/ s3://"$1"/coverage/"$2"/"$3"/htmlcov/ --recursive --acl public-read --region us-east-1 diff --git a/.evergreen/scripts/windows-fix.sh b/.evergreen/scripts/windows-fix.sh new file mode 100644 index 0000000000..cb4fa44130 --- /dev/null +++ b/.evergreen/scripts/windows-fix.sh @@ -0,0 +1,11 @@ +#!/bin/bash + +set +x +. src/.evergreen/scripts/env.sh +# shellcheck disable=SC2044 +for i in $(find "$DRIVERS_TOOLS"/.evergreen "$PROJECT_DIRECTORY"/.evergreen -name \*.sh); do + < "$i" tr -d '\r' >"$i".new + mv "$i".new "$i" +done +# Copy client certificate because symlinks do not work on Windows. 
+cp "$DRIVERS_TOOLS"/.evergreen/x509gen/client.pem "$MONGO_ORCHESTRATION_HOME"/lib/client.pem diff --git a/.evergreen/setup-encryption.sh b/.evergreen/setup-encryption.sh index 71231e1732..b403ef9ca8 100644 --- a/.evergreen/setup-encryption.sh +++ b/.evergreen/setup-encryption.sh @@ -52,6 +52,9 @@ ls -la libmongocrypt ls -la libmongocrypt/nocrypto if [ -z "${SKIP_SERVERS:-}" ]; then - bash ${DRIVERS_TOOLS}/.evergreen/csfle/setup-secrets.sh - bash ${DRIVERS_TOOLS}/.evergreen/csfle/start-servers.sh + PYTHON_BINARY_OLD=${PYTHON_BINARY} + export PYTHON_BINARY="" + bash "${DRIVERS_TOOLS}"/.evergreen/csfle/setup-secrets.sh + export PYTHON_BINARY=$PYTHON_BINARY_OLD + bash "${DRIVERS_TOOLS}"/.evergreen/csfle/start-servers.sh fi diff --git a/.evergreen/utils.sh b/.evergreen/utils.sh index d44425a905..908cf0564a 100755 --- a/.evergreen/utils.sh +++ b/.evergreen/utils.sh @@ -17,7 +17,7 @@ find_python3() { elif [ -d "/Library/Frameworks/Python.Framework/Versions/3.9" ]; then PYTHON="/Library/Frameworks/Python.Framework/Versions/3.9/bin/python3" fi - elif [ "Windows_NT" = "$OS" ]; then # Magic variable in cygwin + elif [ "Windows_NT" = "${OS:-}" ]; then # Magic variable in cygwin PYTHON="C:/python/Python39/python.exe" else # Prefer our own toolchain, fall back to mongodb toolchain if it has Python 3.9+. @@ -56,7 +56,7 @@ createvirtualenv () { # Workaround for bug in older versions of virtualenv. $VIRTUALENV $VENVPATH 2>/dev/null || $VIRTUALENV $VENVPATH fi - if [ "Windows_NT" = "$OS" ]; then + if [ "Windows_NT" = "${OS:-}" ]; then # Workaround https://bugs.python.org/issue32451: # mongovenv/Scripts/activate: line 3: $'\r': command not found dos2unix $VENVPATH/Scripts/activate || true diff --git a/test/asynchronous/test_client_context.py b/test/asynchronous/test_client_context.py index 6d77818436..6a195eb6b8 100644 --- a/test/asynchronous/test_client_context.py +++ b/test/asynchronous/test_client_context.py @@ -25,7 +25,7 @@ class TestAsyncClientContext(AsyncUnitTest): def test_must_connect(self): - if "PYMONGO_MUST_CONNECT" not in os.environ: + if not os.environ.get("PYMONGO_MUST_CONNECT"): raise SkipTest("PYMONGO_MUST_CONNECT is not set") self.assertTrue( @@ -37,7 +37,7 @@ def test_must_connect(self): ) def test_serverless(self): - if "TEST_SERVERLESS" not in os.environ: + if not os.environ.get("TEST_SERVERLESS"): raise SkipTest("TEST_SERVERLESS is not set") self.assertTrue( @@ -47,7 +47,7 @@ def test_serverless(self): ) def test_enableTestCommands_is_disabled(self): - if "PYMONGO_DISABLE_TEST_COMMANDS" not in os.environ: + if not os.environ.get("PYMONGO_DISABLE_TEST_COMMANDS"): raise SkipTest("PYMONGO_DISABLE_TEST_COMMANDS is not set") self.assertFalse( @@ -56,7 +56,7 @@ def test_enableTestCommands_is_disabled(self): ) def test_setdefaultencoding_worked(self): - if "SETDEFAULTENCODING" not in os.environ: + if not os.environ.get("SETDEFAULTENCODING"): raise SkipTest("SETDEFAULTENCODING is not set") self.assertEqual(sys.getdefaultencoding(), os.environ["SETDEFAULTENCODING"]) diff --git a/test/mod_wsgi_test/README.rst b/test/mod_wsgi_test/README.rst index 2c204f7ac5..e96db9406c 100644 --- a/test/mod_wsgi_test/README.rst +++ b/test/mod_wsgi_test/README.rst @@ -107,4 +107,4 @@ Automation At MongoDB, Inc. we use a continuous integration job that tests each combination in the matrix. The job starts up Apache, starts a single server or replica set, and runs ``test_client.py`` with the proper arguments. 
-See `run-mod-wsgi-tests.sh `_ +See `run-mod-wsgi-tests.sh `_ diff --git a/test/test_client_context.py b/test/test_client_context.py index 5996f9243b..e807ac5f5f 100644 --- a/test/test_client_context.py +++ b/test/test_client_context.py @@ -25,7 +25,7 @@ class TestClientContext(UnitTest): def test_must_connect(self): - if "PYMONGO_MUST_CONNECT" not in os.environ: + if not os.environ.get("PYMONGO_MUST_CONNECT"): raise SkipTest("PYMONGO_MUST_CONNECT is not set") self.assertTrue( @@ -37,7 +37,7 @@ def test_must_connect(self): ) def test_serverless(self): - if "TEST_SERVERLESS" not in os.environ: + if not os.environ.get("TEST_SERVERLESS"): raise SkipTest("TEST_SERVERLESS is not set") self.assertTrue( @@ -47,7 +47,7 @@ def test_serverless(self): ) def test_enableTestCommands_is_disabled(self): - if "PYMONGO_DISABLE_TEST_COMMANDS" not in os.environ: + if not os.environ.get("PYMONGO_DISABLE_TEST_COMMANDS"): raise SkipTest("PYMONGO_DISABLE_TEST_COMMANDS is not set") self.assertFalse( @@ -56,7 +56,7 @@ def test_enableTestCommands_is_disabled(self): ) def test_setdefaultencoding_worked(self): - if "SETDEFAULTENCODING" not in os.environ: + if not os.environ.get("SETDEFAULTENCODING"): raise SkipTest("SETDEFAULTENCODING is not set") self.assertEqual(sys.getdefaultencoding(), os.environ["SETDEFAULTENCODING"]) From 9b5c0981d91aa1baf23d6300bc033fd832457ae4 Mon Sep 17 00:00:00 2001 From: Jib Date: Mon, 25 Nov 2024 13:13:44 -0500 Subject: [PATCH 114/182] PYTHON-4988: Check C extensions are loaded ONLY in CPython builds (#2016) --- .evergreen/run-tests.sh | 4 ++-- .evergreen/utils.sh | 7 ++++++- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/.evergreen/run-tests.sh b/.evergreen/run-tests.sh index 9716c1fc79..95fe10a6c3 100755 --- a/.evergreen/run-tests.sh +++ b/.evergreen/run-tests.sh @@ -38,6 +38,7 @@ export PIP_PREFER_BINARY=1 # Prefer binary dists by default set +x python -c "import sys; sys.exit(sys.prefix == sys.base_prefix)" || (echo "Not inside a virtual env!"; exit 1) +PYTHON_IMPL=$(python -c "import platform; print(platform.python_implementation())") # Try to source local Drivers Secrets if [ -f ./secrets-export.sh ]; then @@ -48,7 +49,7 @@ else fi # Ensure C extensions have compiled. -if [ -z "${NO_EXT:-}" ]; then +if [ -z "${NO_EXT:-}" ] && [ "$PYTHON_IMPL" = "CPython" ]; then python tools/fail_if_no_c.py fi @@ -245,7 +246,6 @@ python -c 'import sys; print(sys.version)' # Run the tests with coverage if requested and coverage is installed. # Only cover CPython. PyPy reports suspiciously low coverage. -PYTHON_IMPL=$(python -c "import platform; print(platform.python_implementation())") if [ -n "$COVERAGE" ] && [ "$PYTHON_IMPL" = "CPython" ]; then # Keep in sync with combine-coverage.sh. # coverage >=5 is needed for relative_files=true. diff --git a/.evergreen/utils.sh b/.evergreen/utils.sh index 908cf0564a..d3af2dcc7a 100755 --- a/.evergreen/utils.sh +++ b/.evergreen/utils.sh @@ -78,6 +78,7 @@ testinstall () { PYTHON=$1 RELEASE=$2 NO_VIRTUALENV=$3 + PYTHON_IMPL=$(python -c "import platform; print(platform.python_implementation())") if [ -z "$NO_VIRTUALENV" ]; then createvirtualenv $PYTHON venvtestinstall @@ -86,7 +87,11 @@ testinstall () { $PYTHON -m pip install --upgrade $RELEASE cd tools - $PYTHON fail_if_no_c.py + + if [ "$PYTHON_IMPL" = "CPython" ]; then + $PYTHON fail_if_no_c.py + fi + $PYTHON -m pip uninstall -y pymongo cd .. 
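A note on the guard added in the utils.sh hunks above: only CPython builds of PyMongo are expected to ship the compiled C extensions, so the fail_if_no_c.py check is now skipped on other implementations such as PyPy. A rough standalone sketch of the same idea (an approximation for illustration only, not the actual contents of tools/fail_if_no_c.py) might look like this:

    import platform
    import sys

    import bson
    import pymongo

    # Only CPython builds are expected to have the C extensions compiled;
    # bson.has_c() / pymongo.has_c() report whether they were importable.
    if platform.python_implementation() == "CPython":
        if not (bson.has_c() and pymongo.has_c()):
            sys.exit("PyMongo/BSON C extensions are not available")

The shell scripts above apply the same gate by capturing platform.python_implementation() into PYTHON_IMPL and only invoking the check when it equals "CPython".
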
From 0e8d70457f2e1ac05baff0c9f5232ddee2b0abcf Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Tue, 26 Nov 2024 16:55:27 -0500 Subject: [PATCH 115/182] Async client uses tasks instead of threads PYTHON-4725 - Async client should use tasks for SDAM instead of threads PYTHON-4860 - Async client should use asyncio.Lock and asyncio.Condition PYTHON-4941 - Synchronous unified test runner being used in asynchronous tests PYTHON-4843 - Async test suite should use a single event loop PYTHON-4945 - Fix test cleanups for mongoses Co-authored-by: Iris <58442094+sleepyStick@users.noreply.github.com> --- .evergreen/config.yml | 2 +- THIRD-PARTY-NOTICES | 58 ++ pymongo/_asyncio_lock.py | 309 +++++++ pymongo/_asyncio_task.py | 49 ++ pymongo/asynchronous/client_bulk.py | 1 - pymongo/asynchronous/cursor.py | 4 +- pymongo/asynchronous/encryption.py | 7 + pymongo/asynchronous/mongo_client.py | 25 +- pymongo/asynchronous/monitor.py | 54 +- pymongo/asynchronous/periodic_executor.py | 219 ----- pymongo/asynchronous/pool.py | 28 +- pymongo/asynchronous/topology.py | 24 +- pymongo/lock.py | 245 +----- pymongo/network_layer.py | 10 +- .../{synchronous => }/periodic_executor.py | 113 ++- pymongo/synchronous/client_bulk.py | 1 - pymongo/synchronous/cursor.py | 2 +- pymongo/synchronous/encryption.py | 7 + pymongo/synchronous/mongo_client.py | 21 +- pymongo/synchronous/monitor.py | 18 +- pymongo/synchronous/pool.py | 28 +- pymongo/synchronous/topology.py | 20 +- test/__init__.py | 97 +-- test/asynchronous/__init__.py | 101 +-- test/asynchronous/conftest.py | 2 +- test/asynchronous/test_bulk.py | 36 +- test/asynchronous/test_change_stream.py | 43 +- test/asynchronous/test_client.py | 80 +- test/asynchronous/test_collation.py | 33 +- test/asynchronous/test_collection.py | 41 +- ...nnections_survive_primary_stepdown_spec.py | 31 +- test/asynchronous/test_create_entities.py | 6 + test/asynchronous/test_cursor.py | 14 +- test/asynchronous/test_database.py | 3 +- test/asynchronous/test_encryption.py | 140 ++- test/asynchronous/test_grid_file.py | 1 + test/asynchronous/test_locks.py | 817 ++++++++---------- test/asynchronous/test_monitoring.py | 41 +- test/asynchronous/test_retryable_writes.py | 65 +- test/asynchronous/test_session.py | 33 +- test/asynchronous/test_transactions.py | 23 +- test/asynchronous/unified_format.py | 60 +- test/asynchronous/utils_spec_runner.py | 26 +- test/conftest.py | 2 +- test/test_bulk.py | 32 +- test/test_change_stream.py | 39 +- test/test_client.py | 31 +- test/test_collation.py | 31 +- test/test_collection.py | 38 +- ...nnections_survive_primary_stepdown_spec.py | 31 +- test/test_create_entities.py | 6 + test/test_cursor.py | 4 - test/test_custom_types.py | 23 +- test/test_database.py | 1 + test/test_encryption.py | 138 ++- test/test_examples.py | 13 +- test/test_grid_file.py | 1 + test/test_gridfs.py | 20 +- test/test_gridfs_bucket.py | 14 +- test/test_monitor.py | 2 +- test/test_monitoring.py | 39 +- test/test_read_concern.py | 20 +- test/test_retryable_writes.py | 65 +- test/test_sdam_monitoring_spec.py | 2 +- test/test_session.py | 32 +- test/test_threads.py | 1 + test/test_transactions.py | 15 +- test/test_typing.py | 7 +- test/unified_format.py | 53 +- test/utils.py | 11 +- test/utils_spec_runner.py | 26 +- tools/synchro.py | 39 +- 72 files changed, 1715 insertions(+), 1959 deletions(-) create mode 100644 pymongo/_asyncio_lock.py create mode 100644 pymongo/_asyncio_task.py delete mode 100644 pymongo/asynchronous/periodic_executor.py rename pymongo/{synchronous => }/periodic_executor.py 
(67%) diff --git a/.evergreen/config.yml b/.evergreen/config.yml index 59b8a543fd..7ca3a72b1a 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -281,7 +281,7 @@ functions: "run tests": - command: subprocess.exec params: - include_expansions_in_env: ["TEST_DATA_LAKE", "AUTH", "SSL", "TEST_INDEX_MANAGEMENT", "CRYPT_SHARED_LIB_PATH", "test_encryption", "test_encryption_pyopenssl", "test_crypt_shared", "test_pyopenssl", "test_loadbalancer", "test_serverless", "ORCHESTRATION_FILE"] + include_expansions_in_env: ["TEST_DATA_LAKE", "PYTHON_BINARY", "AUTH", "SSL", "TEST_INDEX_MANAGEMENT", "CRYPT_SHARED_LIB_PATH", "test_encryption", "test_encryption_pyopenssl", "test_crypt_shared", "test_pyopenssl", "test_loadbalancer", "test_serverless", "ORCHESTRATION_FILE"] binary: bash working_dir: "src" args: diff --git a/THIRD-PARTY-NOTICES b/THIRD-PARTY-NOTICES index 55b8ff7078..ad00831a2a 100644 --- a/THIRD-PARTY-NOTICES +++ b/THIRD-PARTY-NOTICES @@ -38,3 +38,61 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +2) License Notice for _asyncio_lock.py +----------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001-2024 Python Software Foundation; All Rights Reserved" +are retained in Python alone or in any derivative version prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. 
By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. diff --git a/pymongo/_asyncio_lock.py b/pymongo/_asyncio_lock.py new file mode 100644 index 0000000000..669b0f63a7 --- /dev/null +++ b/pymongo/_asyncio_lock.py @@ -0,0 +1,309 @@ +# Copyright (c) 2001-2024 Python Software Foundation; All Rights Reserved + +"""Lock and Condition classes vendored from https://github.com/python/cpython/blob/main/Lib/asyncio/locks.py +to port 3.13 fixes to older versions of Python. +Can be removed once we drop Python 3.12 support.""" + +from __future__ import annotations + +import collections +import threading +from asyncio import events, exceptions +from typing import Any, Coroutine, Optional + +_global_lock = threading.Lock() + + +class _LoopBoundMixin: + _loop = None + + def _get_loop(self) -> Any: + loop = events._get_running_loop() + + if self._loop is None: + with _global_lock: + if self._loop is None: + self._loop = loop + if loop is not self._loop: + raise RuntimeError(f"{self!r} is bound to a different event loop") + return loop + + +class _ContextManagerMixin: + async def __aenter__(self) -> None: + await self.acquire() # type: ignore[attr-defined] + # We have no use for the "as ..." clause in the with + # statement for locks. + return + + async def __aexit__(self, exc_type: Any, exc: Any, tb: Any) -> None: + self.release() # type: ignore[attr-defined] + + +class Lock(_ContextManagerMixin, _LoopBoundMixin): + """Primitive lock objects. + + A primitive lock is a synchronization primitive that is not owned + by a particular task when locked. A primitive lock is in one + of two states, 'locked' or 'unlocked'. + + It is created in the unlocked state. It has two basic methods, + acquire() and release(). When the state is unlocked, acquire() + changes the state to locked and returns immediately. When the + state is locked, acquire() blocks until a call to release() in + another task changes it to unlocked, then the acquire() call + resets it to locked and returns. The release() method should only + be called in the locked state; it changes the state to unlocked + and returns immediately. If an attempt is made to release an + unlocked lock, a RuntimeError will be raised. + + When more than one task is blocked in acquire() waiting for + the state to turn to unlocked, only one task proceeds when a + release() call resets the state to unlocked; successive release() + calls will unblock tasks in FIFO order. + + Locks also support the asynchronous context management protocol. + 'async with lock' statement should be used. + + Usage: + + lock = Lock() + ... + await lock.acquire() + try: + ... + finally: + lock.release() + + Context manager usage: + + lock = Lock() + ... + async with lock: + ... + + Lock objects can be tested for locking state: + + if not lock.locked(): + await lock.acquire() + else: + # lock is acquired + ... 
+ + """ + + def __init__(self) -> None: + self._waiters: Optional[collections.deque] = None + self._locked = False + + def __repr__(self) -> str: + res = super().__repr__() + extra = "locked" if self._locked else "unlocked" + if self._waiters: + extra = f"{extra}, waiters:{len(self._waiters)}" + return f"<{res[1:-1]} [{extra}]>" + + def locked(self) -> bool: + """Return True if lock is acquired.""" + return self._locked + + async def acquire(self) -> bool: + """Acquire a lock. + + This method blocks until the lock is unlocked, then sets it to + locked and returns True. + """ + # Implement fair scheduling, where thread always waits + # its turn. Jumping the queue if all are cancelled is an optimization. + if not self._locked and ( + self._waiters is None or all(w.cancelled() for w in self._waiters) + ): + self._locked = True + return True + + if self._waiters is None: + self._waiters = collections.deque() + fut = self._get_loop().create_future() + self._waiters.append(fut) + + try: + try: + await fut + finally: + self._waiters.remove(fut) + except exceptions.CancelledError: + # Currently the only exception designed be able to occur here. + + # Ensure the lock invariant: If lock is not claimed (or about + # to be claimed by us) and there is a Task in waiters, + # ensure that the Task at the head will run. + if not self._locked: + self._wake_up_first() + raise + + # assert self._locked is False + self._locked = True + return True + + def release(self) -> None: + """Release a lock. + + When the lock is locked, reset it to unlocked, and return. + If any other tasks are blocked waiting for the lock to become + unlocked, allow exactly one of them to proceed. + + When invoked on an unlocked lock, a RuntimeError is raised. + + There is no return value. + """ + if self._locked: + self._locked = False + self._wake_up_first() + else: + raise RuntimeError("Lock is not acquired.") + + def _wake_up_first(self) -> None: + """Ensure that the first waiter will wake up.""" + if not self._waiters: + return + try: + fut = next(iter(self._waiters)) + except StopIteration: + return + + # .done() means that the waiter is already set to wake up. + if not fut.done(): + fut.set_result(True) + + +class Condition(_ContextManagerMixin, _LoopBoundMixin): + """Asynchronous equivalent to threading.Condition. + + This class implements condition variable objects. A condition variable + allows one or more tasks to wait until they are notified by another + task. + + A new Lock object is created and used as the underlying lock. + """ + + def __init__(self, lock: Optional[Lock] = None) -> None: + if lock is None: + lock = Lock() + + self._lock = lock + # Export the lock's locked(), acquire() and release() methods. + self.locked = lock.locked + self.acquire = lock.acquire + self.release = lock.release + + self._waiters: collections.deque = collections.deque() + + def __repr__(self) -> str: + res = super().__repr__() + extra = "locked" if self.locked() else "unlocked" + if self._waiters: + extra = f"{extra}, waiters:{len(self._waiters)}" + return f"<{res[1:-1]} [{extra}]>" + + async def wait(self) -> bool: + """Wait until notified. + + If the calling task has not acquired the lock when this + method is called, a RuntimeError is raised. + + This method releases the underlying lock, and then blocks + until it is awakened by a notify() or notify_all() call for + the same condition variable in another task. Once + awakened, it re-acquires the lock and returns True. 
+ + This method may return spuriously, + which is why the caller should always + re-check the state and be prepared to wait() again. + """ + if not self.locked(): + raise RuntimeError("cannot wait on un-acquired lock") + + fut = self._get_loop().create_future() + self.release() + try: + try: + self._waiters.append(fut) + try: + await fut + return True + finally: + self._waiters.remove(fut) + + finally: + # Must re-acquire lock even if wait is cancelled. + # We only catch CancelledError here, since we don't want any + # other (fatal) errors with the future to cause us to spin. + err = None + while True: + try: + await self.acquire() + break + except exceptions.CancelledError as e: + err = e + + if err is not None: + try: + raise err # Re-raise most recent exception instance. + finally: + err = None # Break reference cycles. + except BaseException: + # Any error raised out of here _may_ have occurred after this Task + # believed to have been successfully notified. + # Make sure to notify another Task instead. This may result + # in a "spurious wakeup", which is allowed as part of the + # Condition Variable protocol. + self._notify(1) + raise + + async def wait_for(self, predicate: Any) -> Coroutine: + """Wait until a predicate becomes true. + + The predicate should be a callable whose result will be + interpreted as a boolean value. The method will repeatedly + wait() until it evaluates to true. The final predicate value is + the return value. + """ + result = predicate() + while not result: + await self.wait() + result = predicate() + return result + + def notify(self, n: int = 1) -> None: + """By default, wake up one task waiting on this condition, if any. + If the calling task has not acquired the lock when this method + is called, a RuntimeError is raised. + + This method wakes up n of the tasks waiting for the condition + variable; if fewer than n are waiting, they are all awoken. + + Note: an awakened task does not actually return from its + wait() call until it can reacquire the lock. Since notify() does + not release the lock, its caller should. + """ + if not self.locked(): + raise RuntimeError("cannot notify on un-acquired lock") + self._notify(n) + + def _notify(self, n: int) -> None: + idx = 0 + for fut in self._waiters: + if idx >= n: + break + + if not fut.done(): + idx += 1 + fut.set_result(False) + + def notify_all(self) -> None: + """Wake up all tasks waiting on this condition. This method acts + like notify(), but wakes up all waiting tasks instead of one. If the + calling task has not acquired the lock when this method is called, + a RuntimeError is raised. + """ + self.notify(len(self._waiters)) diff --git a/pymongo/_asyncio_task.py b/pymongo/_asyncio_task.py new file mode 100644 index 0000000000..8e457763d9 --- /dev/null +++ b/pymongo/_asyncio_task.py @@ -0,0 +1,49 @@ +# Copyright 2024-present MongoDB, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""A custom asyncio.Task that allows checking if a task has been sent a cancellation request. 
+Can be removed once we drop Python 3.10 support in favor of asyncio.Task.cancelling.""" + + +from __future__ import annotations + +import asyncio +import sys +from typing import Any, Coroutine, Optional + + +# TODO (https://jira.mongodb.org/browse/PYTHON-4981): Revisit once the underlying cause of the swallowed cancellations is uncovered +class _Task(asyncio.Task): + def __init__(self, coro: Coroutine[Any, Any, Any], *, name: Optional[str] = None) -> None: + super().__init__(coro, name=name) + self._cancel_requests = 0 + asyncio._register_task(self) + + def cancel(self, msg: Optional[str] = None) -> bool: + self._cancel_requests += 1 + return super().cancel(msg=msg) + + def uncancel(self) -> int: + if self._cancel_requests > 0: + self._cancel_requests -= 1 + return self._cancel_requests + + def cancelling(self) -> int: + return self._cancel_requests + + +def create_task(coro: Coroutine[Any, Any, Any], *, name: Optional[str] = None) -> asyncio.Task: + if sys.version_info >= (3, 11): + return asyncio.create_task(coro, name=name) + return _Task(coro, name=name) diff --git a/pymongo/asynchronous/client_bulk.py b/pymongo/asynchronous/client_bulk.py index 0dcdaa6c07..45824256da 100644 --- a/pymongo/asynchronous/client_bulk.py +++ b/pymongo/asynchronous/client_bulk.py @@ -476,7 +476,6 @@ async def _process_results_cursor( if op_type == "delete": res = DeleteResult(doc, acknowledged=True) # type: ignore[assignment] full_result[f"{op_type}Results"][original_index] = res - except Exception as exc: # Attempt to close the cursor, then raise top-level error. if cmd_cursor.alive: diff --git a/pymongo/asynchronous/cursor.py b/pymongo/asynchronous/cursor.py index 4b4bb52a8e..7d7ae4a5db 100644 --- a/pymongo/asynchronous/cursor.py +++ b/pymongo/asynchronous/cursor.py @@ -45,7 +45,7 @@ ) from pymongo.cursor_shared import _CURSOR_CLOSED_ERRORS, _QUERY_OPTIONS, CursorType, _Hint, _Sort from pymongo.errors import ConnectionFailure, InvalidOperation, OperationFailure -from pymongo.lock import _ALock, _create_lock +from pymongo.lock import _async_create_lock from pymongo.message import ( _CursorAddress, _GetMore, @@ -77,7 +77,7 @@ class _ConnectionManager: def __init__(self, conn: AsyncConnection, more_to_come: bool): self.conn: Optional[AsyncConnection] = conn self.more_to_come = more_to_come - self._alock = _ALock(_create_lock()) + self._lock = _async_create_lock() def update_exhaust(self, more_to_come: bool) -> None: self.more_to_come = more_to_come diff --git a/pymongo/asynchronous/encryption.py b/pymongo/asynchronous/encryption.py index 735e543047..4802c3f54e 100644 --- a/pymongo/asynchronous/encryption.py +++ b/pymongo/asynchronous/encryption.py @@ -15,6 +15,7 @@ """Support for explicit client-side field level encryption.""" from __future__ import annotations +import asyncio import contextlib import enum import socket @@ -111,6 +112,8 @@ def _wrap_encryption_errors() -> Iterator[None]: # BSON encoding/decoding errors are unrelated to encryption so # we should propagate them unchanged. raise + except asyncio.CancelledError: + raise except Exception as exc: raise EncryptionError(exc) from exc @@ -200,6 +203,8 @@ async def kms_request(self, kms_context: MongoCryptKmsContext) -> None: conn.close() except (PyMongoError, MongoCryptError): raise # Propagate pymongo errors directly. + except asyncio.CancelledError: + raise except Exception as error: # Wrap I/O errors in PyMongo exceptions. 
_raise_connection_failure((host, port), error) @@ -722,6 +727,8 @@ async def create_encrypted_collection( await database.create_collection(name=name, **kwargs), encrypted_fields, ) + except asyncio.CancelledError: + raise except Exception as exc: raise EncryptedCollectionError(exc, encrypted_fields) from exc diff --git a/pymongo/asynchronous/mongo_client.py b/pymongo/asynchronous/mongo_client.py index 3e4dc482d7..1600e50628 100644 --- a/pymongo/asynchronous/mongo_client.py +++ b/pymongo/asynchronous/mongo_client.py @@ -32,6 +32,7 @@ """ from __future__ import annotations +import asyncio import contextlib import os import warnings @@ -59,8 +60,8 @@ from bson.codec_options import DEFAULT_CODEC_OPTIONS, CodecOptions, TypeRegistry from bson.timestamp import Timestamp -from pymongo import _csot, common, helpers_shared, uri_parser -from pymongo.asynchronous import client_session, database, periodic_executor +from pymongo import _csot, common, helpers_shared, periodic_executor, uri_parser +from pymongo.asynchronous import client_session, database from pymongo.asynchronous.change_stream import AsyncChangeStream, AsyncClusterChangeStream from pymongo.asynchronous.client_bulk import _AsyncClientBulk from pymongo.asynchronous.client_session import _EmptyServerSession @@ -82,7 +83,11 @@ WaitQueueTimeoutError, WriteConcernError, ) -from pymongo.lock import _HAS_REGISTER_AT_FORK, _ALock, _create_lock, _release_locks +from pymongo.lock import ( + _HAS_REGISTER_AT_FORK, + _async_create_lock, + _release_locks, +) from pymongo.logger import _CLIENT_LOGGER, _log_or_warn from pymongo.message import _CursorAddress, _GetMore, _Query from pymongo.monitoring import ConnectionClosedReason @@ -842,7 +847,7 @@ def __init__( self._options = options = ClientOptions(username, password, dbase, opts, _IS_SYNC) self._default_database_name = dbase - self._lock = _ALock(_create_lock()) + self._lock = _async_create_lock() self._kill_cursors_queue: list = [] self._event_listeners = options.pool_options._event_listeners @@ -908,7 +913,7 @@ async def target() -> bool: await AsyncMongoClient._process_periodic_tasks(client) return True - executor = periodic_executor.PeriodicExecutor( + executor = periodic_executor.AsyncPeriodicExecutor( interval=common.KILL_CURSOR_FREQUENCY, min_interval=common.MIN_HEARTBEAT_INTERVAL, target=target, @@ -1722,7 +1727,7 @@ async def _run_operation( address=address, ) - async with operation.conn_mgr._alock: + async with operation.conn_mgr._lock: async with _MongoClientErrorHandler(self, server, operation.session) as err_handler: # type: ignore[arg-type] err_handler.contribute_socket(operation.conn_mgr.conn) return await server.run_operation( @@ -1970,7 +1975,7 @@ async def _close_cursor_now( try: if conn_mgr: - async with conn_mgr._alock: + async with conn_mgr._lock: # Cursor is pinned to LB outside of a transaction. 
assert address is not None assert conn_mgr.conn is not None @@ -2033,6 +2038,8 @@ async def _process_kill_cursors(self) -> None: for address, cursor_id, conn_mgr in pinned_cursors: try: await self._cleanup_cursor_lock(cursor_id, address, conn_mgr, None, False) + except asyncio.CancelledError: + raise except Exception as exc: if isinstance(exc, InvalidOperation) and self._topology._closed: # Raise the exception when client is closed so that it @@ -2047,6 +2054,8 @@ async def _process_kill_cursors(self) -> None: for address, cursor_ids in address_to_cursor_ids.items(): try: await self._kill_cursors(cursor_ids, address, topology, session=None) + except asyncio.CancelledError: + raise except Exception as exc: if isinstance(exc, InvalidOperation) and self._topology._closed: raise @@ -2061,6 +2070,8 @@ async def _process_periodic_tasks(self) -> None: try: await self._process_kill_cursors() await self._topology.update_pool() + except asyncio.CancelledError: + raise except Exception as exc: if isinstance(exc, InvalidOperation) and self._topology._closed: return diff --git a/pymongo/asynchronous/monitor.py b/pymongo/asynchronous/monitor.py index a4dc9b7f45..ad1bc70aba 100644 --- a/pymongo/asynchronous/monitor.py +++ b/pymongo/asynchronous/monitor.py @@ -16,20 +16,20 @@ from __future__ import annotations +import asyncio import atexit import logging import time import weakref from typing import TYPE_CHECKING, Any, Mapping, Optional, cast -from pymongo import common +from pymongo import common, periodic_executor from pymongo._csot import MovingMinimum -from pymongo.asynchronous import periodic_executor -from pymongo.asynchronous.periodic_executor import _shutdown_executors from pymongo.errors import NetworkTimeout, NotPrimaryError, OperationFailure, _OperationCancelled from pymongo.hello import Hello -from pymongo.lock import _create_lock +from pymongo.lock import _async_create_lock from pymongo.logger import _SDAM_LOGGER, _debug_log, _SDAMStatusMessage +from pymongo.periodic_executor import _shutdown_executors from pymongo.pool_options import _is_faas from pymongo.read_preferences import MovingAverage from pymongo.server_description import ServerDescription @@ -76,7 +76,7 @@ async def target() -> bool: await monitor._run() # type:ignore[attr-defined] return True - executor = periodic_executor.PeriodicExecutor( + executor = periodic_executor.AsyncPeriodicExecutor( interval=interval, min_interval=min_interval, target=target, name=name ) @@ -112,9 +112,9 @@ async def close(self) -> None: """ self.gc_safe_close() - def join(self, timeout: Optional[int] = None) -> None: + async def join(self, timeout: Optional[int] = None) -> None: """Wait for the monitor to stop.""" - self._executor.join(timeout) + await self._executor.join(timeout) def request_check(self) -> None: """If the monitor is sleeping, wake it soon.""" @@ -139,7 +139,7 @@ def __init__( """ super().__init__( topology, - "pymongo_server_monitor_thread", + "pymongo_server_monitor_task", topology_settings.heartbeat_frequency, common.MIN_HEARTBEAT_INTERVAL, ) @@ -238,6 +238,9 @@ async def _run(self) -> None: except ReferenceError: # Topology was garbage-collected. await self.close() + finally: + if self._executor._stopped: + await self._rtt_monitor.close() async def _check_server(self) -> ServerDescription: """Call hello or read the next streaming response. @@ -252,8 +255,10 @@ async def _check_server(self) -> ServerDescription: except (OperationFailure, NotPrimaryError) as exc: # Update max cluster time even when hello fails. 
details = cast(Mapping[str, Any], exc.details) - self._topology.receive_cluster_time(details.get("$clusterTime")) + await self._topology.receive_cluster_time(details.get("$clusterTime")) raise + except asyncio.CancelledError: + raise except ReferenceError: raise except Exception as error: @@ -280,7 +285,7 @@ async def _check_server(self) -> ServerDescription: await self._reset_connection() if isinstance(error, _OperationCancelled): raise - self._rtt_monitor.reset() + await self._rtt_monitor.reset() # Server type defaults to Unknown. return ServerDescription(address, error=error) @@ -321,9 +326,9 @@ async def _check_once(self) -> ServerDescription: self._conn_id = conn.id response, round_trip_time = await self._check_with_socket(conn) if not response.awaitable: - self._rtt_monitor.add_sample(round_trip_time) + await self._rtt_monitor.add_sample(round_trip_time) - avg_rtt, min_rtt = self._rtt_monitor.get() + avg_rtt, min_rtt = await self._rtt_monitor.get() sd = ServerDescription(address, response, avg_rtt, min_round_trip_time=min_rtt) if self._publish: assert self._listeners is not None @@ -419,6 +424,8 @@ def _get_seedlist(self) -> Optional[list[tuple[str, Any]]]: if len(seedlist) == 0: # As per the spec: this should be treated as a failure. raise Exception + except asyncio.CancelledError: + raise except Exception: # As per the spec, upon encountering an error: # - An error must not be raised @@ -439,7 +446,7 @@ def __init__(self, topology: Topology, topology_settings: TopologySettings, pool """ super().__init__( topology, - "pymongo_server_rtt_thread", + "pymongo_server_rtt_task", topology_settings.heartbeat_frequency, common.MIN_HEARTBEAT_INTERVAL, ) @@ -447,7 +454,7 @@ def __init__(self, topology: Topology, topology_settings: TopologySettings, pool self._pool = pool self._moving_average = MovingAverage() self._moving_min = MovingMinimum() - self._lock = _create_lock() + self._lock = _async_create_lock() async def close(self) -> None: self.gc_safe_close() @@ -455,20 +462,20 @@ async def close(self) -> None: # thread has the socket checked out, it will be closed when checked in. await self._pool.reset() - def add_sample(self, sample: float) -> None: + async def add_sample(self, sample: float) -> None: """Add a RTT sample.""" - with self._lock: + async with self._lock: self._moving_average.add_sample(sample) self._moving_min.add_sample(sample) - def get(self) -> tuple[Optional[float], float]: + async def get(self) -> tuple[Optional[float], float]: """Get the calculated average, or None if no samples yet and the min.""" - with self._lock: + async with self._lock: return self._moving_average.get(), self._moving_min.get() - def reset(self) -> None: + async def reset(self) -> None: """Reset the average RTT.""" - with self._lock: + async with self._lock: self._moving_average.reset() self._moving_min.reset() @@ -478,10 +485,12 @@ async def _run(self) -> None: # heartbeat protocol (MongoDB 4.4+). # XXX: Skip check if the server is unknown? rtt = await self._ping() - self.add_sample(rtt) + await self.add_sample(rtt) except ReferenceError: # Topology was garbage-collected. 
await self.close() + except asyncio.CancelledError: + raise except Exception: await self._pool.reset() @@ -536,4 +545,5 @@ def _shutdown_resources() -> None: shutdown() -atexit.register(_shutdown_resources) +if _IS_SYNC: + atexit.register(_shutdown_resources) diff --git a/pymongo/asynchronous/periodic_executor.py b/pymongo/asynchronous/periodic_executor.py deleted file mode 100644 index f3d2fddba3..0000000000 --- a/pymongo/asynchronous/periodic_executor.py +++ /dev/null @@ -1,219 +0,0 @@ -# Copyright 2014-present MongoDB, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you -# may not use this file except in compliance with the License. You -# may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. See the License for the specific language governing -# permissions and limitations under the License. - -"""Run a target function on a background thread.""" - -from __future__ import annotations - -import asyncio -import sys -import threading -import time -import weakref -from typing import Any, Optional - -from pymongo.lock import _ALock, _create_lock - -_IS_SYNC = False - - -class PeriodicExecutor: - def __init__( - self, - interval: float, - min_interval: float, - target: Any, - name: Optional[str] = None, - ): - """Run a target function periodically on a background thread. - - If the target's return value is false, the executor stops. - - :param interval: Seconds between calls to `target`. - :param min_interval: Minimum seconds between calls if `wake` is - called very often. - :param target: A function. - :param name: A name to give the underlying thread. - """ - # threading.Event and its internal condition variable are expensive - # in Python 2, see PYTHON-983. Use a boolean to know when to wake. - # The executor's design is constrained by several Python issues, see - # "periodic_executor.rst" in this repository. - self._event = False - self._interval = interval - self._min_interval = min_interval - self._target = target - self._stopped = False - self._thread: Optional[threading.Thread] = None - self._name = name - self._skip_sleep = False - self._thread_will_exit = False - self._lock = _ALock(_create_lock()) - - def __repr__(self) -> str: - return f"<{self.__class__.__name__}(name={self._name}) object at 0x{id(self):x}>" - - def _run_async(self) -> None: - # The default asyncio loop implementation on Windows - # has issues with sharing sockets across loops (https://github.com/python/cpython/issues/122240) - # We explicitly use a different loop implementation here to prevent that issue - if sys.platform == "win32": - loop = asyncio.SelectorEventLoop() - try: - loop.run_until_complete(self._run()) # type: ignore[func-returns-value] - finally: - loop.close() - else: - asyncio.run(self._run()) # type: ignore[func-returns-value] - - def open(self) -> None: - """Start. Multiple calls have no effect. - - Not safe to call from multiple threads at once. - """ - with self._lock: - if self._thread_will_exit: - # If the background thread has read self._stopped as True - # there is a chance that it has not yet exited. The call to - # join should not block indefinitely because there is no - # other work done outside the while loop in self._run. 
- try: - assert self._thread is not None - self._thread.join() - except ReferenceError: - # Thread terminated. - pass - self._thread_will_exit = False - self._stopped = False - started: Any = False - try: - started = self._thread and self._thread.is_alive() - except ReferenceError: - # Thread terminated. - pass - - if not started: - if _IS_SYNC: - thread = threading.Thread(target=self._run, name=self._name) - else: - thread = threading.Thread(target=self._run_async, name=self._name) - thread.daemon = True - self._thread = weakref.proxy(thread) - _register_executor(self) - # Mitigation to RuntimeError firing when thread starts on shutdown - # https://github.com/python/cpython/issues/114570 - try: - thread.start() - except RuntimeError as e: - if "interpreter shutdown" in str(e) or sys.is_finalizing(): - self._thread = None - return - raise - - def close(self, dummy: Any = None) -> None: - """Stop. To restart, call open(). - - The dummy parameter allows an executor's close method to be a weakref - callback; see monitor.py. - """ - self._stopped = True - - def join(self, timeout: Optional[int] = None) -> None: - if self._thread is not None: - try: - self._thread.join(timeout) - except (ReferenceError, RuntimeError): - # Thread already terminated, or not yet started. - pass - - def wake(self) -> None: - """Execute the target function soon.""" - self._event = True - - def update_interval(self, new_interval: int) -> None: - self._interval = new_interval - - def skip_sleep(self) -> None: - self._skip_sleep = True - - async def _should_stop(self) -> bool: - async with self._lock: - if self._stopped: - self._thread_will_exit = True - return True - return False - - async def _run(self) -> None: - while not await self._should_stop(): - try: - if not await self._target(): - self._stopped = True - break - except BaseException: - async with self._lock: - self._stopped = True - self._thread_will_exit = True - - raise - - if self._skip_sleep: - self._skip_sleep = False - else: - deadline = time.monotonic() + self._interval - while not self._stopped and time.monotonic() < deadline: - await asyncio.sleep(self._min_interval) - if self._event: - break # Early wake. - - self._event = False - - -# _EXECUTORS has a weakref to each running PeriodicExecutor. Once started, -# an executor is kept alive by a strong reference from its thread and perhaps -# from other objects. When the thread dies and all other referrers are freed, -# the executor is freed and removed from _EXECUTORS. If any threads are -# running when the interpreter begins to shut down, we try to halt and join -# them to avoid spurious errors. -_EXECUTORS = set() - - -def _register_executor(executor: PeriodicExecutor) -> None: - ref = weakref.ref(executor, _on_executor_deleted) - _EXECUTORS.add(ref) - - -def _on_executor_deleted(ref: weakref.ReferenceType[PeriodicExecutor]) -> None: - _EXECUTORS.remove(ref) - - -def _shutdown_executors() -> None: - if _EXECUTORS is None: - return - - # Copy the set. Stopping threads has the side effect of removing executors. - executors = list(_EXECUTORS) - - # First signal all executors to close... - for ref in executors: - executor = ref() - if executor: - executor.close() - - # ...then try to join them. 
- for ref in executors: - executor = ref() - if executor: - executor.join(1) - - executor = None diff --git a/pymongo/asynchronous/pool.py b/pymongo/asynchronous/pool.py index ca0cebd417..5dc5675a0a 100644 --- a/pymongo/asynchronous/pool.py +++ b/pymongo/asynchronous/pool.py @@ -23,7 +23,6 @@ import socket import ssl import sys -import threading import time import weakref from typing import ( @@ -65,7 +64,11 @@ _CertificateError, ) from pymongo.hello import Hello, HelloCompat -from pymongo.lock import _ACondition, _ALock, _create_lock +from pymongo.lock import ( + _async_cond_wait, + _async_create_condition, + _async_create_lock, +) from pymongo.logger import ( _CONNECTION_LOGGER, _ConnectionStatusMessage, @@ -208,11 +211,6 @@ def _raise_connection_failure( raise AutoReconnect(msg) from error -async def _cond_wait(condition: _ACondition, deadline: Optional[float]) -> bool: - timeout = deadline - time.monotonic() if deadline else None - return await condition.wait(timeout) - - def _get_timeout_details(options: PoolOptions) -> dict[str, float]: details = {} timeout = _csot.get_timeout() @@ -706,6 +704,8 @@ def _close_conn(self) -> None: # shutdown. try: self.conn.close() + except asyncio.CancelledError: + raise except Exception: # noqa: S110 pass @@ -992,8 +992,8 @@ def __init__( # from the right side. self.conns: collections.deque = collections.deque() self.active_contexts: set[_CancellationContext] = set() - _lock = _create_lock() - self.lock = _ALock(_lock) + self.lock = _async_create_lock() + self._max_connecting_cond = _async_create_condition(self.lock) self.active_sockets = 0 # Monotonically increasing connection ID required for CMAP Events. self.next_connection_id = 1 @@ -1019,7 +1019,7 @@ def __init__( # The first portion of the wait queue. # Enforces: maxPoolSize # Also used for: clearing the wait queue - self.size_cond = _ACondition(threading.Condition(_lock)) + self.size_cond = _async_create_condition(self.lock) self.requests = 0 self.max_pool_size = self.opts.max_pool_size if not self.max_pool_size: @@ -1027,7 +1027,7 @@ def __init__( # The second portion of the wait queue. # Enforces: maxConnecting # Also used for: clearing the wait queue - self._max_connecting_cond = _ACondition(threading.Condition(_lock)) + self._max_connecting_cond = _async_create_condition(self.lock) self._max_connecting = self.opts.max_connecting self._pending = 0 self._client_id = client_id @@ -1466,7 +1466,8 @@ async def _get_conn( async with self.size_cond: self._raise_if_not_ready(checkout_started_time, emit_event=True) while not (self.requests < self.max_pool_size): - if not await _cond_wait(self.size_cond, deadline): + timeout = deadline - time.monotonic() if deadline else None + if not await _async_cond_wait(self.size_cond, timeout): # Timed out, notify the next thread to ensure a # timeout doesn't consume the condition. if self.requests < self.max_pool_size: @@ -1489,7 +1490,8 @@ async def _get_conn( async with self._max_connecting_cond: self._raise_if_not_ready(checkout_started_time, emit_event=False) while not (self.conns or self._pending < self._max_connecting): - if not await _cond_wait(self._max_connecting_cond, deadline): + timeout = deadline - time.monotonic() if deadline else None + if not await _async_cond_wait(self._max_connecting_cond, timeout): # Timed out, notify the next thread to ensure a # timeout doesn't consume the condition. 
if self.conns or self._pending < self._max_connecting: diff --git a/pymongo/asynchronous/topology.py b/pymongo/asynchronous/topology.py index 82af4257ba..6d67710a7e 100644 --- a/pymongo/asynchronous/topology.py +++ b/pymongo/asynchronous/topology.py @@ -27,8 +27,7 @@ from pathlib import Path from typing import TYPE_CHECKING, Any, Callable, Mapping, Optional, cast -from pymongo import _csot, common, helpers_shared -from pymongo.asynchronous import periodic_executor +from pymongo import _csot, common, helpers_shared, periodic_executor from pymongo.asynchronous.client_session import _ServerSession, _ServerSessionPool from pymongo.asynchronous.monitor import SrvMonitor from pymongo.asynchronous.pool import Pool @@ -44,7 +43,11 @@ WriteError, ) from pymongo.hello import Hello -from pymongo.lock import _ACondition, _ALock, _create_lock +from pymongo.lock import ( + _async_cond_wait, + _async_create_condition, + _async_create_lock, +) from pymongo.logger import ( _SDAM_LOGGER, _SERVER_SELECTION_LOGGER, @@ -170,9 +173,10 @@ def __init__(self, topology_settings: TopologySettings): self._seed_addresses = list(topology_description.server_descriptions()) self._opened = False self._closed = False - _lock = _create_lock() - self._lock = _ALock(_lock) - self._condition = _ACondition(self._settings.condition_class(_lock)) + self._lock = _async_create_lock() + self._condition = _async_create_condition( + self._lock, self._settings.condition_class if _IS_SYNC else None + ) self._servers: dict[_Address, Server] = {} self._pid: Optional[int] = None self._max_cluster_time: Optional[ClusterTime] = None @@ -185,7 +189,7 @@ def __init__(self, topology_settings: TopologySettings): async def target() -> bool: return process_events_queue(weak) - executor = periodic_executor.PeriodicExecutor( + executor = periodic_executor.AsyncPeriodicExecutor( interval=common.EVENTS_QUEUE_FREQUENCY, min_interval=common.MIN_HEARTBEAT_INTERVAL, target=target, @@ -354,7 +358,7 @@ async def _select_servers_loop( # change, or for a timeout. We won't miss any changes that # came after our most recent apply_selector call, since we've # held the lock until now. - await self._condition.wait(common.MIN_HEARTBEAT_INTERVAL) + await _async_cond_wait(self._condition, common.MIN_HEARTBEAT_INTERVAL) self._description.check_compatible() now = time.monotonic() server_descriptions = self._description.apply_selector( @@ -654,7 +658,7 @@ async def request_check_all(self, wait_time: int = 5) -> None: """Wake all monitors, wait for at least one to check its server.""" async with self._lock: self._request_check_all() - await self._condition.wait(wait_time) + await _async_cond_wait(self._condition, wait_time) def data_bearing_servers(self) -> list[ServerDescription]: """Return a list of all data-bearing servers. @@ -742,7 +746,7 @@ async def close(self) -> None: if self._publish_server or self._publish_tp: # Make sure the events executor thread is fully closed before publishing the remaining events self.__events_executor.close() - self.__events_executor.join(1) + await self.__events_executor.join(1) process_events_queue(weakref.ref(self._events)) # type: ignore[arg-type] @property diff --git a/pymongo/lock.py b/pymongo/lock.py index 0cbfb4a57e..6bf7138017 100644 --- a/pymongo/lock.py +++ b/pymongo/lock.py @@ -11,15 +11,20 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+ +"""Internal helpers for lock and condition coordination primitives.""" + from __future__ import annotations import asyncio -import collections import os +import sys import threading -import time import weakref -from typing import Any, Callable, Optional, TypeVar +from asyncio import wait_for +from typing import Any, Optional, TypeVar + +import pymongo._asyncio_lock _HAS_REGISTER_AT_FORK = hasattr(os, "register_at_fork") @@ -28,6 +33,15 @@ _T = TypeVar("_T") +# Needed to support 3.13 asyncio fixes (https://github.com/python/cpython/issues/112202) +# in older versions of Python +if sys.version_info >= (3, 13): + Lock = asyncio.Lock + Condition = asyncio.Condition +else: + Lock = pymongo._asyncio_lock.Lock + Condition = pymongo._asyncio_lock.Condition + def _create_lock() -> threading.Lock: """Represents a lock that is tracked upon instantiation using a WeakSet and @@ -39,6 +53,27 @@ def _create_lock() -> threading.Lock: return lock +def _async_create_lock() -> Lock: + """Represents an asyncio.Lock.""" + return Lock() + + +def _create_condition( + lock: threading.Lock, condition_class: Optional[Any] = None +) -> threading.Condition: + """Represents a threading.Condition.""" + if condition_class: + return condition_class(lock) + return threading.Condition(lock) + + +def _async_create_condition(lock: Lock, condition_class: Optional[Any] = None) -> Condition: + """Represents an asyncio.Condition.""" + if condition_class: + return condition_class(lock) + return Condition(lock) + + def _release_locks() -> None: # Completed the fork, reset all the locks in the child. for lock in _forkable_locks: @@ -46,202 +81,12 @@ def _release_locks() -> None: lock.release() -# Needed only for synchro.py compat. -def _Lock(lock: threading.Lock) -> threading.Lock: - return lock +async def _async_cond_wait(condition: Condition, timeout: Optional[float]) -> bool: + try: + return await wait_for(condition.wait(), timeout) + except asyncio.TimeoutError: + return False -class _ALock: - __slots__ = ("_lock",) - - def __init__(self, lock: threading.Lock) -> None: - self._lock = lock - - def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: - return self._lock.acquire(blocking=blocking, timeout=timeout) - - async def a_acquire(self, blocking: bool = True, timeout: float = -1) -> bool: - if timeout > 0: - tstart = time.monotonic() - while True: - acquired = self._lock.acquire(blocking=False) - if acquired: - return True - if timeout > 0 and (time.monotonic() - tstart) > timeout: - return False - if not blocking: - return False - await asyncio.sleep(0) - - def release(self) -> None: - self._lock.release() - - async def __aenter__(self) -> _ALock: - await self.a_acquire() - return self - - def __enter__(self) -> _ALock: - self._lock.acquire() - return self - - def __exit__(self, exc_type: Any, exc: Any, tb: Any) -> None: - self.release() - - async def __aexit__(self, exc_type: Any, exc: Any, tb: Any) -> None: - self.release() - - -def _safe_set_result(fut: asyncio.Future) -> None: - # Ensure the future hasn't been cancelled before calling set_result. 
- if not fut.done(): - fut.set_result(False) - - -class _ACondition: - __slots__ = ("_condition", "_waiters") - - def __init__(self, condition: threading.Condition) -> None: - self._condition = condition - self._waiters: collections.deque = collections.deque() - - async def acquire(self, blocking: bool = True, timeout: float = -1) -> bool: - if timeout > 0: - tstart = time.monotonic() - while True: - acquired = self._condition.acquire(blocking=False) - if acquired: - return True - if timeout > 0 and (time.monotonic() - tstart) > timeout: - return False - if not blocking: - return False - await asyncio.sleep(0) - - async def wait(self, timeout: Optional[float] = None) -> bool: - """Wait until notified. - - If the calling task has not acquired the lock when this - method is called, a RuntimeError is raised. - - This method releases the underlying lock, and then blocks - until it is awakened by a notify() or notify_all() call for - the same condition variable in another task. Once - awakened, it re-acquires the lock and returns True. - - This method may return spuriously, - which is why the caller should always - re-check the state and be prepared to wait() again. - """ - loop = asyncio.get_running_loop() - fut = loop.create_future() - self._waiters.append((loop, fut)) - self.release() - try: - try: - try: - await asyncio.wait_for(fut, timeout) - return True - except asyncio.TimeoutError: - return False # Return false on timeout for sync pool compat. - finally: - # Must re-acquire lock even if wait is cancelled. - # We only catch CancelledError here, since we don't want any - # other (fatal) errors with the future to cause us to spin. - err = None - while True: - try: - await self.acquire() - break - except asyncio.exceptions.CancelledError as e: - err = e - - self._waiters.remove((loop, fut)) - if err is not None: - try: - raise err # Re-raise most recent exception instance. - finally: - err = None # Break reference cycles. - except BaseException: - # Any error raised out of here _may_ have occurred after this Task - # believed to have been successfully notified. - # Make sure to notify another Task instead. This may result - # in a "spurious wakeup", which is allowed as part of the - # Condition Variable protocol. - self.notify(1) - raise - - async def wait_for(self, predicate: Callable[[], _T]) -> _T: - """Wait until a predicate becomes true. - - The predicate should be a callable whose result will be - interpreted as a boolean value. The method will repeatedly - wait() until it evaluates to true. The final predicate value is - the return value. - """ - result = predicate() - while not result: - await self.wait() - result = predicate() - return result - - def notify(self, n: int = 1) -> None: - """By default, wake up one coroutine waiting on this condition, if any. - If the calling coroutine has not acquired the lock when this method - is called, a RuntimeError is raised. - - This method wakes up at most n of the coroutines waiting for the - condition variable; it is a no-op if no coroutines are waiting. - - Note: an awakened coroutine does not actually return from its - wait() call until it can reacquire the lock. Since notify() does - not release the lock, its caller should. - """ - idx = 0 - to_remove = [] - for loop, fut in self._waiters: - if idx >= n: - break - - if fut.done(): - continue - - try: - loop.call_soon_threadsafe(_safe_set_result, fut) - except RuntimeError: - # Loop was closed, ignore. 
- to_remove.append((loop, fut)) - continue - - idx += 1 - - for waiter in to_remove: - self._waiters.remove(waiter) - - def notify_all(self) -> None: - """Wake up all threads waiting on this condition. This method acts - like notify(), but wakes up all waiting threads instead of one. If the - calling thread has not acquired the lock when this method is called, - a RuntimeError is raised. - """ - self.notify(len(self._waiters)) - - def locked(self) -> bool: - """Only needed for tests in test_locks.""" - return self._condition._lock.locked() # type: ignore[attr-defined] - - def release(self) -> None: - self._condition.release() - - async def __aenter__(self) -> _ACondition: - await self.acquire() - return self - - def __enter__(self) -> _ACondition: - self._condition.acquire() - return self - - async def __aexit__(self, exc_type: Any, exc: Any, tb: Any) -> None: - self.release() - - def __exit__(self, exc_type: Any, exc: Any, tb: Any) -> None: - self.release() +def _cond_wait(condition: threading.Condition, timeout: Optional[float]) -> bool: + return condition.wait(timeout) diff --git a/pymongo/network_layer.py b/pymongo/network_layer.py index aa16e85a07..6ab6db2f7d 100644 --- a/pymongo/network_layer.py +++ b/pymongo/network_layer.py @@ -29,6 +29,7 @@ ) from pymongo import _csot, ssl_support +from pymongo._asyncio_task import create_task from pymongo.errors import _OperationCancelled from pymongo.socket_checker import _errno_from_exception @@ -259,19 +260,20 @@ async def async_receive_data( sock.settimeout(0.0) loop = asyncio.get_event_loop() - cancellation_task = asyncio.create_task(_poll_cancellation(conn)) + cancellation_task = create_task(_poll_cancellation(conn)) try: if _HAVE_SSL and isinstance(sock, (SSLSocket, _sslConn)): - read_task = asyncio.create_task(_async_receive_ssl(sock, length, loop)) # type: ignore[arg-type] + read_task = create_task(_async_receive_ssl(sock, length, loop)) # type: ignore[arg-type] else: - read_task = asyncio.create_task(_async_receive(sock, length, loop)) # type: ignore[arg-type] + read_task = create_task(_async_receive(sock, length, loop)) # type: ignore[arg-type] tasks = [read_task, cancellation_task] done, pending = await asyncio.wait( tasks, timeout=timeout, return_when=asyncio.FIRST_COMPLETED ) for task in pending: task.cancel() - await asyncio.wait(pending) + if pending: + await asyncio.wait(pending) if len(done) == 0: raise socket.timeout("timed out") if read_task in done: diff --git a/pymongo/synchronous/periodic_executor.py b/pymongo/periodic_executor.py similarity index 67% rename from pymongo/synchronous/periodic_executor.py rename to pymongo/periodic_executor.py index 525268b14b..2f89b91deb 100644 --- a/pymongo/synchronous/periodic_executor.py +++ b/pymongo/periodic_executor.py @@ -23,9 +23,102 @@ import weakref from typing import Any, Optional +from pymongo._asyncio_task import create_task from pymongo.lock import _create_lock -_IS_SYNC = True +_IS_SYNC = False + + +class AsyncPeriodicExecutor: + def __init__( + self, + interval: float, + min_interval: float, + target: Any, + name: Optional[str] = None, + ): + """Run a target function periodically on a background task. + + If the target's return value is false, the executor stops. + + :param interval: Seconds between calls to `target`. + :param min_interval: Minimum seconds between calls if `wake` is + called very often. + :param target: A function. + :param name: A name to give the underlying task. 
+ """ + self._event = False + self._interval = interval + self._min_interval = min_interval + self._target = target + self._stopped = False + self._task: Optional[asyncio.Task] = None + self._name = name + self._skip_sleep = False + + def __repr__(self) -> str: + return f"<{self.__class__.__name__}(name={self._name}) object at 0x{id(self):x}>" + + def open(self) -> None: + """Start. Multiple calls have no effect.""" + self._stopped = False + + if self._task is None or ( + self._task.done() and not self._task.cancelled() and not self._task.cancelling() # type: ignore[unused-ignore, attr-defined] + ): + self._task = create_task(self._run(), name=self._name) + + def close(self, dummy: Any = None) -> None: + """Stop. To restart, call open(). + + The dummy parameter allows an executor's close method to be a weakref + callback; see monitor.py. + """ + self._stopped = True + + async def join(self, timeout: Optional[int] = None) -> None: + if self._task is not None: + try: + await asyncio.wait_for(self._task, timeout=timeout) # type-ignore: [arg-type] + except asyncio.TimeoutError: + # Task timed out + pass + except asyncio.exceptions.CancelledError: + # Task was already finished, or not yet started. + raise + + def wake(self) -> None: + """Execute the target function soon.""" + self._event = True + + def update_interval(self, new_interval: int) -> None: + self._interval = new_interval + + def skip_sleep(self) -> None: + self._skip_sleep = True + + async def _run(self) -> None: + while not self._stopped: + if self._task and self._task.cancelling(): # type: ignore[unused-ignore, attr-defined] + raise asyncio.CancelledError + try: + if not await self._target(): + self._stopped = True + break + except BaseException: + self._stopped = True + raise + + if self._skip_sleep: + self._skip_sleep = False + else: + deadline = time.monotonic() + self._interval + while not self._stopped and time.monotonic() < deadline: + await asyncio.sleep(self._min_interval) + if self._event: + break # Early wake. + + self._event = False class PeriodicExecutor: @@ -64,19 +157,6 @@ def __init__( def __repr__(self) -> str: return f"<{self.__class__.__name__}(name={self._name}) object at 0x{id(self):x}>" - def _run_async(self) -> None: - # The default asyncio loop implementation on Windows - # has issues with sharing sockets across loops (https://github.com/python/cpython/issues/122240) - # We explicitly use a different loop implementation here to prevent that issue - if sys.platform == "win32": - loop = asyncio.SelectorEventLoop() - try: - loop.run_until_complete(self._run()) # type: ignore[func-returns-value] - finally: - loop.close() - else: - asyncio.run(self._run()) # type: ignore[func-returns-value] - def open(self) -> None: """Start. Multiple calls have no effect. 
@@ -104,10 +184,7 @@ def open(self) -> None: pass if not started: - if _IS_SYNC: - thread = threading.Thread(target=self._run, name=self._name) - else: - thread = threading.Thread(target=self._run_async, name=self._name) + thread = threading.Thread(target=self._run, name=self._name) thread.daemon = True self._thread = weakref.proxy(thread) _register_executor(self) diff --git a/pymongo/synchronous/client_bulk.py b/pymongo/synchronous/client_bulk.py index 625e8429eb..9f6e3f7cf0 100644 --- a/pymongo/synchronous/client_bulk.py +++ b/pymongo/synchronous/client_bulk.py @@ -474,7 +474,6 @@ def _process_results_cursor( if op_type == "delete": res = DeleteResult(doc, acknowledged=True) # type: ignore[assignment] full_result[f"{op_type}Results"][original_index] = res - except Exception as exc: # Attempt to close the cursor, then raise top-level error. if cmd_cursor.alive: diff --git a/pymongo/synchronous/cursor.py b/pymongo/synchronous/cursor.py index 27a76cf91d..9a7637704f 100644 --- a/pymongo/synchronous/cursor.py +++ b/pymongo/synchronous/cursor.py @@ -77,7 +77,7 @@ class _ConnectionManager: def __init__(self, conn: Connection, more_to_come: bool): self.conn: Optional[Connection] = conn self.more_to_come = more_to_come - self._alock = _create_lock() + self._lock = _create_lock() def update_exhaust(self, more_to_come: bool) -> None: self.more_to_come = more_to_come diff --git a/pymongo/synchronous/encryption.py b/pymongo/synchronous/encryption.py index 506ff8bcba..09d0c0f2fd 100644 --- a/pymongo/synchronous/encryption.py +++ b/pymongo/synchronous/encryption.py @@ -15,6 +15,7 @@ """Support for explicit client-side field level encryption.""" from __future__ import annotations +import asyncio import contextlib import enum import socket @@ -111,6 +112,8 @@ def _wrap_encryption_errors() -> Iterator[None]: # BSON encoding/decoding errors are unrelated to encryption so # we should propagate them unchanged. raise + except asyncio.CancelledError: + raise except Exception as exc: raise EncryptionError(exc) from exc @@ -200,6 +203,8 @@ def kms_request(self, kms_context: MongoCryptKmsContext) -> None: conn.close() except (PyMongoError, MongoCryptError): raise # Propagate pymongo errors directly. + except asyncio.CancelledError: + raise except Exception as error: # Wrap I/O errors in PyMongo exceptions. 
_raise_connection_failure((host, port), error) @@ -716,6 +721,8 @@ def create_encrypted_collection( database.create_collection(name=name, **kwargs), encrypted_fields, ) + except asyncio.CancelledError: + raise except Exception as exc: raise EncryptedCollectionError(exc, encrypted_fields) from exc diff --git a/pymongo/synchronous/mongo_client.py b/pymongo/synchronous/mongo_client.py index 00c6203a94..a694a58c1e 100644 --- a/pymongo/synchronous/mongo_client.py +++ b/pymongo/synchronous/mongo_client.py @@ -32,6 +32,7 @@ """ from __future__ import annotations +import asyncio import contextlib import os import warnings @@ -58,7 +59,7 @@ from bson.codec_options import DEFAULT_CODEC_OPTIONS, CodecOptions, TypeRegistry from bson.timestamp import Timestamp -from pymongo import _csot, common, helpers_shared, uri_parser +from pymongo import _csot, common, helpers_shared, periodic_executor, uri_parser from pymongo.client_options import ClientOptions from pymongo.errors import ( AutoReconnect, @@ -74,7 +75,11 @@ WaitQueueTimeoutError, WriteConcernError, ) -from pymongo.lock import _HAS_REGISTER_AT_FORK, _create_lock, _release_locks +from pymongo.lock import ( + _HAS_REGISTER_AT_FORK, + _create_lock, + _release_locks, +) from pymongo.logger import _CLIENT_LOGGER, _log_or_warn from pymongo.message import _CursorAddress, _GetMore, _Query from pymongo.monitoring import ConnectionClosedReason @@ -91,7 +96,7 @@ from pymongo.results import ClientBulkWriteResult from pymongo.server_selectors import writable_server_selector from pymongo.server_type import SERVER_TYPE -from pymongo.synchronous import client_session, database, periodic_executor +from pymongo.synchronous import client_session, database from pymongo.synchronous.change_stream import ChangeStream, ClusterChangeStream from pymongo.synchronous.client_bulk import _ClientBulk from pymongo.synchronous.client_session import _EmptyServerSession @@ -1716,7 +1721,7 @@ def _run_operation( address=address, ) - with operation.conn_mgr._alock: + with operation.conn_mgr._lock: with _MongoClientErrorHandler(self, server, operation.session) as err_handler: # type: ignore[arg-type] err_handler.contribute_socket(operation.conn_mgr.conn) return server.run_operation( @@ -1964,7 +1969,7 @@ def _close_cursor_now( try: if conn_mgr: - with conn_mgr._alock: + with conn_mgr._lock: # Cursor is pinned to LB outside of a transaction. 
assert address is not None assert conn_mgr.conn is not None @@ -2027,6 +2032,8 @@ def _process_kill_cursors(self) -> None: for address, cursor_id, conn_mgr in pinned_cursors: try: self._cleanup_cursor_lock(cursor_id, address, conn_mgr, None, False) + except asyncio.CancelledError: + raise except Exception as exc: if isinstance(exc, InvalidOperation) and self._topology._closed: # Raise the exception when client is closed so that it @@ -2041,6 +2048,8 @@ def _process_kill_cursors(self) -> None: for address, cursor_ids in address_to_cursor_ids.items(): try: self._kill_cursors(cursor_ids, address, topology, session=None) + except asyncio.CancelledError: + raise except Exception as exc: if isinstance(exc, InvalidOperation) and self._topology._closed: raise @@ -2055,6 +2064,8 @@ def _process_periodic_tasks(self) -> None: try: self._process_kill_cursors() self._topology.update_pool() + except asyncio.CancelledError: + raise except Exception as exc: if isinstance(exc, InvalidOperation) and self._topology._closed: return diff --git a/pymongo/synchronous/monitor.py b/pymongo/synchronous/monitor.py index d02ad0a6fd..df4130d4ab 100644 --- a/pymongo/synchronous/monitor.py +++ b/pymongo/synchronous/monitor.py @@ -16,24 +16,24 @@ from __future__ import annotations +import asyncio import atexit import logging import time import weakref from typing import TYPE_CHECKING, Any, Mapping, Optional, cast -from pymongo import common +from pymongo import common, periodic_executor from pymongo._csot import MovingMinimum from pymongo.errors import NetworkTimeout, NotPrimaryError, OperationFailure, _OperationCancelled from pymongo.hello import Hello from pymongo.lock import _create_lock from pymongo.logger import _SDAM_LOGGER, _debug_log, _SDAMStatusMessage +from pymongo.periodic_executor import _shutdown_executors from pymongo.pool_options import _is_faas from pymongo.read_preferences import MovingAverage from pymongo.server_description import ServerDescription from pymongo.srv_resolver import _SrvResolver -from pymongo.synchronous import periodic_executor -from pymongo.synchronous.periodic_executor import _shutdown_executors if TYPE_CHECKING: from pymongo.synchronous.pool import Connection, Pool, _CancellationContext @@ -238,6 +238,9 @@ def _run(self) -> None: except ReferenceError: # Topology was garbage-collected. self.close() + finally: + if self._executor._stopped: + self._rtt_monitor.close() def _check_server(self) -> ServerDescription: """Call hello or read the next streaming response. @@ -254,6 +257,8 @@ def _check_server(self) -> ServerDescription: details = cast(Mapping[str, Any], exc.details) self._topology.receive_cluster_time(details.get("$clusterTime")) raise + except asyncio.CancelledError: + raise except ReferenceError: raise except Exception as error: @@ -419,6 +424,8 @@ def _get_seedlist(self) -> Optional[list[tuple[str, Any]]]: if len(seedlist) == 0: # As per the spec: this should be treated as a failure. raise Exception + except asyncio.CancelledError: + raise except Exception: # As per the spec, upon encountering an error: # - An error must not be raised @@ -482,6 +489,8 @@ def _run(self) -> None: except ReferenceError: # Topology was garbage-collected. 
self.close() + except asyncio.CancelledError: + raise except Exception: self._pool.reset() @@ -536,4 +545,5 @@ def _shutdown_resources() -> None: shutdown() -atexit.register(_shutdown_resources) +if _IS_SYNC: + atexit.register(_shutdown_resources) diff --git a/pymongo/synchronous/pool.py b/pymongo/synchronous/pool.py index 86baf15b9a..1a155c82d7 100644 --- a/pymongo/synchronous/pool.py +++ b/pymongo/synchronous/pool.py @@ -23,7 +23,6 @@ import socket import ssl import sys -import threading import time import weakref from typing import ( @@ -62,7 +61,11 @@ _CertificateError, ) from pymongo.hello import Hello, HelloCompat -from pymongo.lock import _create_lock, _Lock +from pymongo.lock import ( + _cond_wait, + _create_condition, + _create_lock, +) from pymongo.logger import ( _CONNECTION_LOGGER, _ConnectionStatusMessage, @@ -208,11 +211,6 @@ def _raise_connection_failure( raise AutoReconnect(msg) from error -def _cond_wait(condition: threading.Condition, deadline: Optional[float]) -> bool: - timeout = deadline - time.monotonic() if deadline else None - return condition.wait(timeout) - - def _get_timeout_details(options: PoolOptions) -> dict[str, float]: details = {} timeout = _csot.get_timeout() @@ -704,6 +702,8 @@ def _close_conn(self) -> None: # shutdown. try: self.conn.close() + except asyncio.CancelledError: + raise except Exception: # noqa: S110 pass @@ -988,8 +988,8 @@ def __init__( # from the right side. self.conns: collections.deque = collections.deque() self.active_contexts: set[_CancellationContext] = set() - _lock = _create_lock() - self.lock = _Lock(_lock) + self.lock = _create_lock() + self._max_connecting_cond = _create_condition(self.lock) self.active_sockets = 0 # Monotonically increasing connection ID required for CMAP Events. self.next_connection_id = 1 @@ -1015,7 +1015,7 @@ def __init__( # The first portion of the wait queue. # Enforces: maxPoolSize # Also used for: clearing the wait queue - self.size_cond = threading.Condition(_lock) + self.size_cond = _create_condition(self.lock) self.requests = 0 self.max_pool_size = self.opts.max_pool_size if not self.max_pool_size: @@ -1023,7 +1023,7 @@ def __init__( # The second portion of the wait queue. # Enforces: maxConnecting # Also used for: clearing the wait queue - self._max_connecting_cond = threading.Condition(_lock) + self._max_connecting_cond = _create_condition(self.lock) self._max_connecting = self.opts.max_connecting self._pending = 0 self._client_id = client_id @@ -1460,7 +1460,8 @@ def _get_conn( with self.size_cond: self._raise_if_not_ready(checkout_started_time, emit_event=True) while not (self.requests < self.max_pool_size): - if not _cond_wait(self.size_cond, deadline): + timeout = deadline - time.monotonic() if deadline else None + if not _cond_wait(self.size_cond, timeout): # Timed out, notify the next thread to ensure a # timeout doesn't consume the condition. if self.requests < self.max_pool_size: @@ -1483,7 +1484,8 @@ def _get_conn( with self._max_connecting_cond: self._raise_if_not_ready(checkout_started_time, emit_event=False) while not (self.conns or self._pending < self._max_connecting): - if not _cond_wait(self._max_connecting_cond, deadline): + timeout = deadline - time.monotonic() if deadline else None + if not _cond_wait(self._max_connecting_cond, timeout): # Timed out, notify the next thread to ensure a # timeout doesn't consume the condition. 
if self.conns or self._pending < self._max_connecting: diff --git a/pymongo/synchronous/topology.py b/pymongo/synchronous/topology.py index a350c1702e..b03269ae43 100644 --- a/pymongo/synchronous/topology.py +++ b/pymongo/synchronous/topology.py @@ -27,7 +27,7 @@ from pathlib import Path from typing import TYPE_CHECKING, Any, Callable, Mapping, Optional, cast -from pymongo import _csot, common, helpers_shared +from pymongo import _csot, common, helpers_shared, periodic_executor from pymongo.errors import ( ConnectionFailure, InvalidOperation, @@ -39,7 +39,11 @@ WriteError, ) from pymongo.hello import Hello -from pymongo.lock import _create_lock, _Lock +from pymongo.lock import ( + _cond_wait, + _create_condition, + _create_lock, +) from pymongo.logger import ( _SDAM_LOGGER, _SERVER_SELECTION_LOGGER, @@ -56,7 +60,6 @@ secondary_server_selector, writable_server_selector, ) -from pymongo.synchronous import periodic_executor from pymongo.synchronous.client_session import _ServerSession, _ServerSessionPool from pymongo.synchronous.monitor import SrvMonitor from pymongo.synchronous.pool import Pool @@ -170,9 +173,10 @@ def __init__(self, topology_settings: TopologySettings): self._seed_addresses = list(topology_description.server_descriptions()) self._opened = False self._closed = False - _lock = _create_lock() - self._lock = _Lock(_lock) - self._condition = self._settings.condition_class(_lock) + self._lock = _create_lock() + self._condition = _create_condition( + self._lock, self._settings.condition_class if _IS_SYNC else None + ) self._servers: dict[_Address, Server] = {} self._pid: Optional[int] = None self._max_cluster_time: Optional[ClusterTime] = None @@ -354,7 +358,7 @@ def _select_servers_loop( # change, or for a timeout. We won't miss any changes that # came after our most recent apply_selector call, since we've # held the lock until now. - self._condition.wait(common.MIN_HEARTBEAT_INTERVAL) + _cond_wait(self._condition, common.MIN_HEARTBEAT_INTERVAL) self._description.check_compatible() now = time.monotonic() server_descriptions = self._description.apply_selector( @@ -652,7 +656,7 @@ def request_check_all(self, wait_time: int = 5) -> None: """Wake all monitors, wait for at least one to check its server.""" with self._lock: self._request_check_all() - self._condition.wait(wait_time) + _cond_wait(self._condition, wait_time) def data_bearing_servers(self) -> list[ServerDescription]: """Return a list of all data-bearing servers. 
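A note on the locking changes above: _cond_wait now takes a relative timeout directly, so Pool._get_conn converts its absolute monotonic deadline into a remaining-time value immediately before each wait, while Topology simply passes a fixed interval. A small, self-contained sketch of that deadline-to-timeout pattern using only the standard library follows; wait_until, the producer thread, and the one-second deadline are illustrative and not part of PyMongo.

    import threading
    import time
    from typing import Callable, Optional


    def wait_until(
        cond: threading.Condition, predicate: Callable[[], bool], deadline: Optional[float]
    ) -> bool:
        """Wait on cond until predicate() is true or the monotonic deadline passes."""
        with cond:
            while not predicate():
                # Recompute the remaining time before every wait so repeated
                # wake-ups cannot stretch the overall deadline; None waits forever.
                timeout = deadline - time.monotonic() if deadline else None
                if timeout is not None and timeout <= 0:
                    return False
                cond.wait(timeout)
            return True


    cond = threading.Condition()
    items = []


    def producer() -> None:
        # Simulate another thread satisfying the predicate shortly after we wait.
        time.sleep(0.1)
        with cond:
            items.append(1)
            cond.notify()


    threading.Thread(target=producer).start()
    print(wait_until(cond, lambda: bool(items), time.monotonic() + 1.0))  # True
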
diff --git a/test/__init__.py b/test/__init__.py index fd33fde293..d3a63db2d5 100644 --- a/test/__init__.py +++ b/test/__init__.py @@ -17,6 +17,7 @@ import asyncio import gc +import logging import multiprocessing import os import signal @@ -25,6 +26,7 @@ import sys import threading import time +import traceback import unittest import warnings from asyncio import iscoroutinefunction @@ -191,6 +193,8 @@ def _connect(self, host, port, **kwargs): client.close() def _init_client(self): + self.mongoses = [] + self.connection_attempts = [] self.client = self._connect(host, port) if self.client is not None: # Return early when connected to dataLake as mongohoused does not @@ -860,6 +864,16 @@ def max_message_size_bytes(self): client_context = ClientContext() +def reset_client_context(): + if _IS_SYNC: + # sync tests don't need to reset a client context + return + elif client_context.client is not None: + client_context.client.close() + client_context.client = None + client_context._init_client() + + class PyMongoTestCase(unittest.TestCase): def assertEqualCommand(self, expected, actual, msg=None): self.assertEqual(sanitize_cmd(expected), sanitize_cmd(actual), msg) @@ -1106,26 +1120,10 @@ def enable_replication(self, client): class UnitTest(PyMongoTestCase): """Async base class for TestCases that don't require a connection to MongoDB.""" - @classmethod - def setUpClass(cls): - if _IS_SYNC: - cls._setup_class() - else: - asyncio.run(cls._setup_class()) - - @classmethod - def tearDownClass(cls): - if _IS_SYNC: - cls._tearDown_class() - else: - asyncio.run(cls._tearDown_class()) - - @classmethod - def _setup_class(cls): + def setUp(self) -> None: pass - @classmethod - def _tearDown_class(cls): + def tearDown(self) -> None: pass @@ -1136,37 +1134,20 @@ class IntegrationTest(PyMongoTestCase): db: Database credentials: Dict[str, str] - @classmethod - def setUpClass(cls): - if _IS_SYNC: - cls._setup_class() - else: - asyncio.run(cls._setup_class()) - - @classmethod - def tearDownClass(cls): - if _IS_SYNC: - cls._tearDown_class() - else: - asyncio.run(cls._tearDown_class()) - - @classmethod @client_context.require_connection - def _setup_class(cls): - if client_context.load_balancer and not getattr(cls, "RUN_ON_LOAD_BALANCER", False): + def setUp(self) -> None: + if not _IS_SYNC: + reset_client_context() + if client_context.load_balancer and not getattr(self, "RUN_ON_LOAD_BALANCER", False): raise SkipTest("this test does not support load balancers") - if client_context.serverless and not getattr(cls, "RUN_ON_SERVERLESS", False): + if client_context.serverless and not getattr(self, "RUN_ON_SERVERLESS", False): raise SkipTest("this test does not support serverless") - cls.client = client_context.client - cls.db = cls.client.pymongo_test + self.client = client_context.client + self.db = self.client.pymongo_test if client_context.auth_enabled: - cls.credentials = {"username": db_user, "password": db_pwd} + self.credentials = {"username": db_user, "password": db_pwd} else: - cls.credentials = {} - - @classmethod - def _tearDown_class(cls): - pass + self.credentials = {} def cleanup_colls(self, *collections): """Cleanup collections faster than drop_collection.""" @@ -1192,37 +1173,14 @@ class MockClientTest(UnitTest): # MockClients tests that use replicaSet, directConnection=True, pass # multiple seed addresses, or wait for heartbeat events are incompatible # with loadBalanced=True. 
- @classmethod - def setUpClass(cls): - if _IS_SYNC: - cls._setup_class() - else: - asyncio.run(cls._setup_class()) - - @classmethod - def tearDownClass(cls): - if _IS_SYNC: - cls._tearDown_class() - else: - asyncio.run(cls._tearDown_class()) - - @classmethod @client_context.require_no_load_balancer - def _setup_class(cls): - pass - - @classmethod - def _tearDown_class(cls): - pass - - def setUp(self): + def setUp(self) -> None: super().setUp() self.client_knobs = client_knobs(heartbeat_frequency=0.001, min_heartbeat_interval=0.001) - self.client_knobs.enable() - def tearDown(self): + def tearDown(self) -> None: self.client_knobs.disable() super().tearDown() @@ -1253,7 +1211,6 @@ def teardown(): c.drop_database("pymongo_test_mike") c.drop_database("pymongo_test_bernie") c.close() - print_running_clients() diff --git a/test/asynchronous/__init__.py b/test/asynchronous/__init__.py index 0579828c49..73e2824742 100644 --- a/test/asynchronous/__init__.py +++ b/test/asynchronous/__init__.py @@ -17,6 +17,7 @@ import asyncio import gc +import logging import multiprocessing import os import signal @@ -25,6 +26,7 @@ import sys import threading import time +import traceback import unittest import warnings from asyncio import iscoroutinefunction @@ -191,6 +193,8 @@ async def _connect(self, host, port, **kwargs): await client.close() async def _init_client(self): + self.mongoses = [] + self.connection_attempts = [] self.client = await self._connect(host, port) if self.client is not None: # Return early when connected to dataLake as mongohoused does not @@ -862,6 +866,16 @@ async def max_message_size_bytes(self): async_client_context = AsyncClientContext() +async def reset_client_context(): + if _IS_SYNC: + # sync tests don't need to reset a client context + return + elif async_client_context.client is not None: + await async_client_context.client.close() + async_client_context.client = None + await async_client_context._init_client() + + class AsyncPyMongoTestCase(unittest.IsolatedAsyncioTestCase): def assertEqualCommand(self, expected, actual, msg=None): self.assertEqual(sanitize_cmd(expected), sanitize_cmd(actual), msg) @@ -1124,26 +1138,10 @@ async def enable_replication(self, client): class AsyncUnitTest(AsyncPyMongoTestCase): """Async base class for TestCases that don't require a connection to MongoDB.""" - @classmethod - def setUpClass(cls): - if _IS_SYNC: - cls._setup_class() - else: - asyncio.run(cls._setup_class()) - - @classmethod - def tearDownClass(cls): - if _IS_SYNC: - cls._tearDown_class() - else: - asyncio.run(cls._tearDown_class()) - - @classmethod - async def _setup_class(cls): + async def asyncSetUp(self) -> None: pass - @classmethod - async def _tearDown_class(cls): + async def asyncTearDown(self) -> None: pass @@ -1154,37 +1152,20 @@ class AsyncIntegrationTest(AsyncPyMongoTestCase): db: AsyncDatabase credentials: Dict[str, str] - @classmethod - def setUpClass(cls): - if _IS_SYNC: - cls._setup_class() - else: - asyncio.run(cls._setup_class()) - - @classmethod - def tearDownClass(cls): - if _IS_SYNC: - cls._tearDown_class() - else: - asyncio.run(cls._tearDown_class()) - - @classmethod @async_client_context.require_connection - async def _setup_class(cls): - if async_client_context.load_balancer and not getattr(cls, "RUN_ON_LOAD_BALANCER", False): + async def asyncSetUp(self) -> None: + if not _IS_SYNC: + await reset_client_context() + if async_client_context.load_balancer and not getattr(self, "RUN_ON_LOAD_BALANCER", False): raise SkipTest("this test does not support load balancers") 
- if async_client_context.serverless and not getattr(cls, "RUN_ON_SERVERLESS", False): + if async_client_context.serverless and not getattr(self, "RUN_ON_SERVERLESS", False): raise SkipTest("this test does not support serverless") - cls.client = async_client_context.client - cls.db = cls.client.pymongo_test + self.client = async_client_context.client + self.db = self.client.pymongo_test if async_client_context.auth_enabled: - cls.credentials = {"username": db_user, "password": db_pwd} + self.credentials = {"username": db_user, "password": db_pwd} else: - cls.credentials = {} - - @classmethod - async def _tearDown_class(cls): - pass + self.credentials = {} async def cleanup_colls(self, *collections): """Cleanup collections faster than drop_collection.""" @@ -1210,39 +1191,16 @@ class AsyncMockClientTest(AsyncUnitTest): # MockClients tests that use replicaSet, directConnection=True, pass # multiple seed addresses, or wait for heartbeat events are incompatible # with loadBalanced=True. - @classmethod - def setUpClass(cls): - if _IS_SYNC: - cls._setup_class() - else: - asyncio.run(cls._setup_class()) - - @classmethod - def tearDownClass(cls): - if _IS_SYNC: - cls._tearDown_class() - else: - asyncio.run(cls._tearDown_class()) - - @classmethod @async_client_context.require_no_load_balancer - async def _setup_class(cls): - pass - - @classmethod - async def _tearDown_class(cls): - pass - - def setUp(self): - super().setUp() + async def asyncSetUp(self) -> None: + await super().asyncSetUp() self.client_knobs = client_knobs(heartbeat_frequency=0.001, min_heartbeat_interval=0.001) - self.client_knobs.enable() - def tearDown(self): + async def asyncTearDown(self) -> None: self.client_knobs.disable() - super().tearDown() + await super().asyncTearDown() async def async_setup(): @@ -1271,7 +1229,6 @@ async def async_teardown(): await c.drop_database("pymongo_test_mike") await c.drop_database("pymongo_test_bernie") await c.close() - print_running_clients() diff --git a/test/asynchronous/conftest.py b/test/asynchronous/conftest.py index e443dff6c0..a27a9f213d 100644 --- a/test/asynchronous/conftest.py +++ b/test/asynchronous/conftest.py @@ -22,7 +22,7 @@ def event_loop_policy(): return asyncio.get_event_loop_policy() -@pytest_asyncio.fixture(scope="session", autouse=True) +@pytest_asyncio.fixture(scope="package", autouse=True) async def test_setup_and_teardown(): await async_setup() yield diff --git a/test/asynchronous/test_bulk.py b/test/asynchronous/test_bulk.py index c9ff167b43..7191a412c1 100644 --- a/test/asynchronous/test_bulk.py +++ b/test/asynchronous/test_bulk.py @@ -42,15 +42,11 @@ class AsyncBulkTestBase(AsyncIntegrationTest): coll: AsyncCollection coll_w0: AsyncCollection - @classmethod - async def _setup_class(cls): - await super()._setup_class() - cls.coll = cls.db.test - cls.coll_w0 = cls.coll.with_options(write_concern=WriteConcern(w=0)) - async def asyncSetUp(self): - super().setUp() + await super().asyncSetUp() + self.coll = self.db.test await self.coll.drop() + self.coll_w0 = self.coll.with_options(write_concern=WriteConcern(w=0)) def assertEqualResponse(self, expected, actual): """Compare response from bulk.execute() to expected response.""" @@ -787,14 +783,10 @@ async def test_large_inserts_unordered(self): class AsyncBulkAuthorizationTestBase(AsyncBulkTestBase): - @classmethod @async_client_context.require_auth @async_client_context.require_no_api_version - async def _setup_class(cls): - await super()._setup_class() - async def asyncSetUp(self): - super().setUp() + await 
super().asyncSetUp() await async_client_context.create_user(self.db.name, "readonly", "pw", ["read"]) await self.db.command( "createRole", @@ -937,21 +929,19 @@ class AsyncTestBulkWriteConcern(AsyncBulkTestBase): w: Optional[int] secondary: AsyncMongoClient - @classmethod - async def _setup_class(cls): - await super()._setup_class() - cls.w = async_client_context.w - cls.secondary = None - if cls.w is not None and cls.w > 1: + async def asyncSetUp(self): + await super().asyncSetUp() + self.w = async_client_context.w + self.secondary = None + if self.w is not None and self.w > 1: for member in (await async_client_context.hello)["hosts"]: if member != (await async_client_context.hello)["primary"]: - cls.secondary = await cls.unmanaged_async_single_client(*partition_node(member)) + self.secondary = await self.async_single_client(*partition_node(member)) break - @classmethod - async def async_tearDownClass(cls): - if cls.secondary: - await cls.secondary.close() + async def asyncTearDown(self): + if self.secondary: + await self.secondary.close() async def cause_wtimeout(self, requests, ordered): if not async_client_context.test_commands_enabled: diff --git a/test/asynchronous/test_change_stream.py b/test/asynchronous/test_change_stream.py index 8e16fe7528..08da00cc1e 100644 --- a/test/asynchronous/test_change_stream.py +++ b/test/asynchronous/test_change_stream.py @@ -836,18 +836,16 @@ async def test_split_large_change(self): class TestClusterAsyncChangeStream(TestAsyncChangeStreamBase, APITestsMixin): dbs: list - @classmethod @async_client_context.require_version_min(4, 0, 0, -1) @async_client_context.require_change_streams - async def _setup_class(cls): - await super()._setup_class() - cls.dbs = [cls.db, cls.client.pymongo_test_2] + async def asyncSetUp(self) -> None: + await super().asyncSetUp() + self.dbs = [self.db, self.client.pymongo_test_2] - @classmethod - async def _tearDown_class(cls): - for db in cls.dbs: - await cls.client.drop_database(db) - await super()._tearDown_class() + async def asyncTearDown(self): + for db in self.dbs: + await self.client.drop_database(db) + await super().asyncTearDown() async def change_stream_with_client(self, client, *args, **kwargs): return await client.watch(*args, **kwargs) @@ -898,11 +896,10 @@ async def test_full_pipeline(self): class TestAsyncDatabaseAsyncChangeStream(TestAsyncChangeStreamBase, APITestsMixin): - @classmethod @async_client_context.require_version_min(4, 0, 0, -1) @async_client_context.require_change_streams - async def _setup_class(cls): - await super()._setup_class() + async def asyncSetUp(self) -> None: + await super().asyncSetUp() async def change_stream_with_client(self, client, *args, **kwargs): return await client[self.db.name].watch(*args, **kwargs) @@ -988,12 +985,9 @@ async def test_isolation(self): class TestAsyncCollectionAsyncChangeStream( TestAsyncChangeStreamBase, APITestsMixin, ProseSpecTestsMixin ): - @classmethod @async_client_context.require_change_streams - async def _setup_class(cls): - await super()._setup_class() - async def asyncSetUp(self): + await super().asyncSetUp() # Use a new collection for each test. 
await self.watched_collection().drop() await self.watched_collection().insert_one({}) @@ -1133,20 +1127,11 @@ class TestAllLegacyScenarios(AsyncIntegrationTest): RUN_ON_LOAD_BALANCER = True listener: AllowListEventListener - @classmethod @async_client_context.require_connection - async def _setup_class(cls): - await super()._setup_class() - cls.listener = AllowListEventListener("aggregate", "getMore") - cls.client = await cls.unmanaged_async_rs_or_single_client(event_listeners=[cls.listener]) - - @classmethod - async def _tearDown_class(cls): - await cls.client.close() - await super()._tearDown_class() - - def asyncSetUp(self): - super().asyncSetUp() + async def asyncSetUp(self): + await super().asyncSetUp() + self.listener = AllowListEventListener("aggregate", "getMore") + self.client = await self.async_rs_or_single_client(event_listeners=[self.listener]) self.listener.reset() async def asyncSetUpCluster(self, scenario_dict): diff --git a/test/asynchronous/test_client.py b/test/asynchronous/test_client.py index 590154b857..db232386ee 100644 --- a/test/asynchronous/test_client.py +++ b/test/asynchronous/test_client.py @@ -73,7 +73,6 @@ is_greenthread_patched, lazy_client_trial, one, - wait_until, ) import bson @@ -131,16 +130,11 @@ class AsyncClientUnitTest(AsyncUnitTest): client: AsyncMongoClient - @classmethod - async def _setup_class(cls): - cls.client = await cls.unmanaged_async_rs_or_single_client( + async def asyncSetUp(self) -> None: + self.client = await self.async_rs_or_single_client( connect=False, serverSelectionTimeoutMS=100 ) - @classmethod - async def _tearDown_class(cls): - await cls.client.close() - @pytest.fixture(autouse=True) def inject_fixtures(self, caplog): self._caplog = caplog @@ -693,8 +687,8 @@ async def test_max_idle_time_reaper_removes_stale_minPoolSize(self): # When the reaper runs at the same time as the get_socket, two # connections could be created and checked into the pool. self.assertGreaterEqual(len(server._pool.conns), 1) - wait_until(lambda: conn not in server._pool.conns, "remove stale socket") - wait_until(lambda: len(server._pool.conns) >= 1, "replace stale socket") + await async_wait_until(lambda: conn not in server._pool.conns, "remove stale socket") + await async_wait_until(lambda: len(server._pool.conns) >= 1, "replace stale socket") async def test_max_idle_time_reaper_does_not_exceed_maxPoolSize(self): with client_knobs(kill_cursor_frequency=0.1): @@ -710,8 +704,8 @@ async def test_max_idle_time_reaper_does_not_exceed_maxPoolSize(self): # When the reaper runs at the same time as the get_socket, # maxPoolSize=1 should prevent two connections from being created. 
self.assertEqual(1, len(server._pool.conns)) - wait_until(lambda: conn not in server._pool.conns, "remove stale socket") - wait_until(lambda: len(server._pool.conns) == 1, "replace stale socket") + await async_wait_until(lambda: conn not in server._pool.conns, "remove stale socket") + await async_wait_until(lambda: len(server._pool.conns) == 1, "replace stale socket") async def test_max_idle_time_reaper_removes_stale(self): with client_knobs(kill_cursor_frequency=0.1): @@ -727,7 +721,7 @@ async def test_max_idle_time_reaper_removes_stale(self): async with server._pool.checkout() as conn_two: pass self.assertIs(conn_one, conn_two) - wait_until( + await async_wait_until( lambda: len(server._pool.conns) == 0, "stale socket reaped and new one NOT added to the pool", ) @@ -745,7 +739,7 @@ async def test_min_pool_size(self): server = await (await client._get_topology()).select_server( readable_server_selector, _Op.TEST ) - wait_until( + await async_wait_until( lambda: len(server._pool.conns) == 10, "pool initialized with 10 connections", ) @@ -753,7 +747,7 @@ async def test_min_pool_size(self): # Assert that if a socket is closed, a new one takes its place async with server._pool.checkout() as conn: conn.close_conn(None) - wait_until( + await async_wait_until( lambda: len(server._pool.conns) == 10, "a closed socket gets replaced from the pool", ) @@ -939,8 +933,10 @@ async def test_repr(self): async with eval(the_repr) as client_two: self.assertEqual(client_two, client) - def test_getters(self): - wait_until(lambda: async_client_context.nodes == self.client.nodes, "find all nodes") + async def test_getters(self): + await async_wait_until( + lambda: async_client_context.nodes == self.client.nodes, "find all nodes" + ) async def test_list_databases(self): cmd_docs = (await self.client.admin.command("listDatabases"))["databases"] @@ -1065,14 +1061,21 @@ async def test_uri_connect_option(self): self.assertFalse(client._topology._opened) # Ensure kill cursors thread has not been started. - kc_thread = client._kill_cursors_executor._thread - self.assertFalse(kc_thread and kc_thread.is_alive()) - + if _IS_SYNC: + kc_thread = client._kill_cursors_executor._thread + self.assertFalse(kc_thread and kc_thread.is_alive()) + else: + kc_task = client._kill_cursors_executor._task + self.assertFalse(kc_task and not kc_task.done()) # Using the client should open topology and start the thread. 
await client.admin.command("ping") self.assertTrue(client._topology._opened) - kc_thread = client._kill_cursors_executor._thread - self.assertTrue(kc_thread and kc_thread.is_alive()) + if _IS_SYNC: + kc_thread = client._kill_cursors_executor._thread + self.assertTrue(kc_thread and kc_thread.is_alive()) + else: + kc_task = client._kill_cursors_executor._task + self.assertTrue(kc_task and not kc_task.done()) async def test_close_does_not_open_servers(self): client = await self.async_rs_client(connect=False) @@ -1277,6 +1280,7 @@ async def get_x(db): async def test_server_selection_timeout(self): client = AsyncMongoClient(serverSelectionTimeoutMS=100, connect=False) self.assertAlmostEqual(0.1, client.options.server_selection_timeout) + await client.close() client = AsyncMongoClient(serverSelectionTimeoutMS=0, connect=False) @@ -1289,18 +1293,22 @@ async def test_server_selection_timeout(self): self.assertRaises( ConfigurationError, AsyncMongoClient, serverSelectionTimeoutMS=None, connect=False ) + await client.close() client = AsyncMongoClient( "mongodb://localhost/?serverSelectionTimeoutMS=100", connect=False ) self.assertAlmostEqual(0.1, client.options.server_selection_timeout) + await client.close() client = AsyncMongoClient("mongodb://localhost/?serverSelectionTimeoutMS=0", connect=False) self.assertAlmostEqual(0, client.options.server_selection_timeout) + await client.close() # Test invalid timeout in URI ignored and set to default. client = AsyncMongoClient("mongodb://localhost/?serverSelectionTimeoutMS=-1", connect=False) self.assertAlmostEqual(30, client.options.server_selection_timeout) + await client.close() client = AsyncMongoClient("mongodb://localhost/?serverSelectionTimeoutMS=", connect=False) self.assertAlmostEqual(30, client.options.server_selection_timeout) @@ -1608,7 +1616,7 @@ def init(self, *args): await async_client_context.port, ) await self.async_single_client(uri, event_listeners=[listener]) - wait_until( + await async_wait_until( lambda: len(listener.results) >= 2, "record two ServerHeartbeatStartedEvents" ) @@ -1766,16 +1774,16 @@ async def test_background_connections_do_not_hold_locks(self): pool = await async_get_pool(client) original_connect = pool.connect - def stall_connect(*args, **kwargs): - time.sleep(2) - return original_connect(*args, **kwargs) + async def stall_connect(*args, **kwargs): + await asyncio.sleep(2) + return await original_connect(*args, **kwargs) pool.connect = stall_connect # Un-patch Pool.connect to break the cyclic reference. self.addCleanup(delattr, pool, "connect") # Wait for the background thread to start creating connections - wait_until(lambda: len(pool.conns) > 1, "start creating connections") + await async_wait_until(lambda: len(pool.conns) > 1, "start creating connections") # Assert that application operations do not block. 
for _ in range(10): @@ -1858,7 +1866,7 @@ async def test_process_periodic_tasks(self): await client.close() # Add cursor to kill cursors queue del cursor - wait_until( + await async_wait_until( lambda: client._kill_cursors_queue, "waited for cursor to be added to queue", ) @@ -2232,7 +2240,7 @@ async def test_exhaust_getmore_network_error(self): await cursor.to_list() self.assertTrue(conn.closed) - wait_until( + await async_wait_until( lambda: len(client._kill_cursors_queue) == 0, "waited for all killCursor requests to complete", ) @@ -2403,7 +2411,7 @@ async def test_discover_primary(self): ) self.addAsyncCleanup(c.close) - wait_until(lambda: len(c.nodes) == 3, "connect") + await async_wait_until(lambda: len(c.nodes) == 3, "connect") self.assertEqual(await c.address, ("a", 1)) # Fail over. @@ -2430,7 +2438,7 @@ async def test_reconnect(self): ) self.addAsyncCleanup(c.close) - wait_until(lambda: len(c.nodes) == 3, "connect") + await async_wait_until(lambda: len(c.nodes) == 3, "connect") # Total failure. c.kill_host("a:1") @@ -2472,7 +2480,7 @@ async def _test_network_error(self, operation_callback): c.set_wire_version_range("a:1", 2, MIN_SUPPORTED_WIRE_VERSION) c.set_wire_version_range("b:2", 2, MIN_SUPPORTED_WIRE_VERSION + 1) await (await c._get_topology()).select_servers(writable_server_selector, _Op.TEST) - wait_until(lambda: len(c.nodes) == 2, "connect") + await async_wait_until(lambda: len(c.nodes) == 2, "connect") c.kill_host("a:1") @@ -2544,11 +2552,11 @@ async def test_rs_client_does_not_maintain_pool_to_arbiters(self): ) self.addAsyncCleanup(c.close) - wait_until(lambda: len(c.nodes) == 3, "connect") + await async_wait_until(lambda: len(c.nodes) == 3, "connect") self.assertEqual(await c.address, ("a", 1)) self.assertEqual(await c.arbiters, {("c", 3)}) # Assert that we create 2 and only 2 pooled connections. - listener.wait_for_event(monitoring.ConnectionReadyEvent, 2) + await listener.async_wait_for_event(monitoring.ConnectionReadyEvent, 2) self.assertEqual(listener.event_count(monitoring.ConnectionCreatedEvent), 2) # Assert that we do not create connections to arbiters. arbiter = c._topology.get_server_by_address(("c", 3)) @@ -2574,10 +2582,10 @@ async def test_direct_client_maintains_pool_to_arbiter(self): ) self.addAsyncCleanup(c.close) - wait_until(lambda: len(c.nodes) == 1, "connect") + await async_wait_until(lambda: len(c.nodes) == 1, "connect") self.assertEqual(await c.address, ("c", 3)) # Assert that we create 1 pooled connection. 
- listener.wait_for_event(monitoring.ConnectionReadyEvent, 1) + await listener.async_wait_for_event(monitoring.ConnectionReadyEvent, 1) self.assertEqual(listener.event_count(monitoring.ConnectionCreatedEvent), 1) arbiter = c._topology.get_server_by_address(("c", 3)) self.assertEqual(len(arbiter.pool.conns), 1) diff --git a/test/asynchronous/test_collation.py b/test/asynchronous/test_collation.py index d95f4c9917..d7fd85b168 100644 --- a/test/asynchronous/test_collation.py +++ b/test/asynchronous/test_collation.py @@ -97,28 +97,21 @@ class TestCollation(AsyncIntegrationTest): warn_context: Any collation: Collation - @classmethod @async_client_context.require_connection - async def _setup_class(cls): - await super()._setup_class() - cls.listener = OvertCommandListener() - cls.client = await cls.unmanaged_async_rs_or_single_client(event_listeners=[cls.listener]) - cls.db = cls.client.pymongo_test - cls.collation = Collation("en_US") - cls.warn_context = warnings.catch_warnings() - cls.warn_context.__enter__() - warnings.simplefilter("ignore", DeprecationWarning) - - @classmethod - async def _tearDown_class(cls): - cls.warn_context.__exit__() - cls.warn_context = None - await cls.client.close() - await super()._tearDown_class() - - def tearDown(self): + async def asyncSetUp(self) -> None: + await super().asyncSetUp() + self.listener = OvertCommandListener() + self.client = await self.async_rs_or_single_client(event_listeners=[self.listener]) + self.db = self.client.pymongo_test + self.collation = Collation("en_US") + self.warn_context = warnings.catch_warnings() + self.warn_context.__enter__() + + async def asyncTearDown(self) -> None: + self.warn_context.__exit__() + self.warn_context = None self.listener.reset() - super().tearDown() + await super().asyncTearDown() def last_command_started(self): return self.listener.started_events[-1].command diff --git a/test/asynchronous/test_collection.py b/test/asynchronous/test_collection.py index db52bad4ac..528919f63c 100644 --- a/test/asynchronous/test_collection.py +++ b/test/asynchronous/test_collection.py @@ -40,7 +40,6 @@ async_get_pool, async_is_mongos, async_wait_until, - wait_until, ) from bson import encode @@ -88,14 +87,10 @@ class TestCollectionNoConnect(AsyncUnitTest): db: AsyncDatabase client: AsyncMongoClient - @classmethod - async def _setup_class(cls): - cls.client = AsyncMongoClient(connect=False) - cls.db = cls.client.pymongo_test - - @classmethod - async def _tearDown_class(cls): - await cls.client.close() + async def asyncSetUp(self) -> None: + await super().asyncSetUp() + self.client = self.simple_client(connect=False) + self.db = self.client.pymongo_test def test_collection(self): self.assertRaises(TypeError, AsyncCollection, self.db, 5) @@ -165,27 +160,14 @@ def test_iteration(self): class AsyncTestCollection(AsyncIntegrationTest): w: int - @classmethod - def setUpClass(cls): - super().setUpClass() - cls.w = async_client_context.w # type: ignore - - @classmethod - def tearDownClass(cls): - if _IS_SYNC: - cls.db.drop_collection("test_large_limit") # type: ignore[unused-coroutine] - else: - asyncio.run(cls.async_tearDownClass()) - - @classmethod - async def async_tearDownClass(cls): - await cls.db.drop_collection("test_large_limit") - async def asyncSetUp(self): - await self.db.test.drop() + await super().asyncSetUp() + self.w = async_client_context.w # type: ignore async def asyncTearDown(self): await self.db.test.drop() + await self.db.drop_collection("test_large_limit") + await super().asyncTearDown() @contextlib.contextmanager 
def write_concern_collection(self): @@ -1023,7 +1005,10 @@ async def test_replace_bypass_document_validation(self): await db.test.insert_one({"y": 1}, bypass_document_validation=True) await db_w0.test.replace_one({"y": 1}, {"x": 1}, bypass_document_validation=True) - await async_wait_until(lambda: db_w0.test.find_one({"x": 1}), "find w:0 replaced document") + async def predicate(): + return await db_w0.test.find_one({"x": 1}) + + await async_wait_until(predicate, "find w:0 replaced document") async def test_update_bypass_document_validation(self): db = self.db @@ -1871,7 +1856,7 @@ async def test_exhaust(self): await cur.close() cur = None # Wait until the background thread returns the socket. - wait_until(lambda: pool.active_sockets == 0, "return socket") + await async_wait_until(lambda: pool.active_sockets == 0, "return socket") # The socket should be discarded. self.assertEqual(0, len(pool.conns)) diff --git a/test/asynchronous/test_connections_survive_primary_stepdown_spec.py b/test/asynchronous/test_connections_survive_primary_stepdown_spec.py index 289cf49751..bc9638b443 100644 --- a/test/asynchronous/test_connections_survive_primary_stepdown_spec.py +++ b/test/asynchronous/test_connections_survive_primary_stepdown_spec.py @@ -19,7 +19,12 @@ sys.path[0:0] = [""] -from test.asynchronous import AsyncIntegrationTest, async_client_context, unittest +from test.asynchronous import ( + AsyncIntegrationTest, + async_client_context, + reset_client_context, + unittest, +) from test.asynchronous.helpers import async_repl_set_step_down from test.utils import ( CMAPListener, @@ -39,29 +44,19 @@ class TestAsyncConnectionsSurvivePrimaryStepDown(AsyncIntegrationTest): listener: CMAPListener coll: AsyncCollection - @classmethod @async_client_context.require_replica_set - async def _setup_class(cls): - await super()._setup_class() - cls.listener = CMAPListener() - cls.client = await cls.unmanaged_async_rs_or_single_client( - event_listeners=[cls.listener], retryWrites=False, heartbeatFrequencyMS=500 + async def asyncSetUp(self): + self.listener = CMAPListener() + self.client = await self.async_rs_or_single_client( + event_listeners=[self.listener], retryWrites=False, heartbeatFrequencyMS=500 ) # Ensure connections to all servers in replica set. This is to test # that the is_writable flag is properly updated for connections that # survive a replica set election. - await async_ensure_all_connected(cls.client) - cls.listener.reset() - - cls.db = cls.client.get_database("step-down", write_concern=WriteConcern("majority")) - cls.coll = cls.db.get_collection("step-down", write_concern=WriteConcern("majority")) - - @classmethod - async def _tearDown_class(cls): - await cls.client.close() - - async def asyncSetUp(self): + await async_ensure_all_connected(self.client) + self.db = self.client.get_database("step-down", write_concern=WriteConcern("majority")) + self.coll = self.db.get_collection("step-down", write_concern=WriteConcern("majority")) # Note that all ops use same write-concern as self.db (majority). 
await self.db.drop_collection("step-down") await self.db.create_collection("step-down") diff --git a/test/asynchronous/test_create_entities.py b/test/asynchronous/test_create_entities.py index cb2ec63f4c..1f68cf6ddc 100644 --- a/test/asynchronous/test_create_entities.py +++ b/test/asynchronous/test_create_entities.py @@ -56,6 +56,9 @@ async def test_store_events_as_entities(self): self.assertGreater(len(final_entity_map["events1"]), 0) for event in final_entity_map["events1"]: self.assertIn("PoolCreatedEvent", event["name"]) + if self.scenario_runner.mongos_clients: + for client in self.scenario_runner.mongos_clients: + await client.close() async def test_store_all_others_as_entities(self): self.scenario_runner = UnifiedSpecTestMixinV1() @@ -122,6 +125,9 @@ async def test_store_all_others_as_entities(self): self.assertEqual(entity_map["failures"], []) self.assertEqual(entity_map["successes"], 2) self.assertEqual(entity_map["iterations"], 5) + if self.scenario_runner.mongos_clients: + for client in self.scenario_runner.mongos_clients: + await client.close() if __name__ == "__main__": diff --git a/test/asynchronous/test_cursor.py b/test/asynchronous/test_cursor.py index 787da3d957..d216479451 100644 --- a/test/asynchronous/test_cursor.py +++ b/test/asynchronous/test_cursor.py @@ -34,9 +34,9 @@ AllowListEventListener, EventListener, OvertCommandListener, + async_wait_until, delay, ignore_deprecations, - wait_until, ) from bson import decode_all @@ -1324,8 +1324,8 @@ async def test_timeout_kills_cursor_asynchronously(self): with self.assertRaises(ExecutionTimeout): await cursor.next() - def assertCursorKilled(): - wait_until( + async def assertCursorKilled(): + await async_wait_until( lambda: len(listener.succeeded_events), "find successful killCursors command", ) @@ -1335,7 +1335,7 @@ def assertCursorKilled(): self.assertEqual(1, len(listener.succeeded_events)) self.assertEqual("killCursors", listener.succeeded_events[0].command_name) - assertCursorKilled() + await assertCursorKilled() listener.reset() cursor = await coll.aggregate([], batchSize=1) @@ -1345,7 +1345,7 @@ def assertCursorKilled(): with self.assertRaises(ExecutionTimeout): await cursor.next() - assertCursorKilled() + await assertCursorKilled() def test_delete_not_initialized(self): # Creating a cursor with invalid arguments will not run __init__ @@ -1647,10 +1647,6 @@ async def test_monitoring(self): class TestRawBatchCommandCursor(AsyncIntegrationTest): - @classmethod - async def _setup_class(cls): - await super()._setup_class() - async def test_aggregate_raw(self): c = self.db.test await c.drop() diff --git a/test/asynchronous/test_database.py b/test/asynchronous/test_database.py index 61369c8542..b5a5960420 100644 --- a/test/asynchronous/test_database.py +++ b/test/asynchronous/test_database.py @@ -717,7 +717,8 @@ def test_with_options(self): class TestDatabaseAggregation(AsyncIntegrationTest): - def setUp(self): + async def asyncSetUp(self): + await super().asyncSetUp() self.pipeline: List[Mapping[str, Any]] = [ {"$listLocalSessions": {}}, {"$limit": 1}, diff --git a/test/asynchronous/test_encryption.py b/test/asynchronous/test_encryption.py index 767b3ecf0a..048db2d501 100644 --- a/test/asynchronous/test_encryption.py +++ b/test/asynchronous/test_encryption.py @@ -211,11 +211,10 @@ async def test_kwargs(self): class AsyncEncryptionIntegrationTest(AsyncIntegrationTest): """Base class for encryption integration tests.""" - @classmethod @unittest.skipUnless(_HAVE_PYMONGOCRYPT, "pymongocrypt is not installed") 
@async_client_context.require_version_min(4, 2, -1) - async def _setup_class(cls): - await super()._setup_class() + async def asyncSetUp(self) -> None: + await super().asyncSetUp() def assertEncrypted(self, val): self.assertIsInstance(val, Binary) @@ -430,10 +429,9 @@ async def test_upsert_uuid_standard_encrypt(self): class TestClientMaxWireVersion(AsyncIntegrationTest): - @classmethod @unittest.skipUnless(_HAVE_PYMONGOCRYPT, "pymongocrypt is not installed") - async def _setup_class(cls): - await super()._setup_class() + async def asyncSetUp(self): + await super().asyncSetUp() @async_client_context.require_version_max(4, 0, 99) async def test_raise_max_wire_version_error(self): @@ -818,17 +816,16 @@ class TestDataKeyDoubleEncryption(AsyncEncryptionIntegrationTest): "local": None, } - @classmethod @unittest.skipUnless( any([all(AWS_CREDS.values()), all(AZURE_CREDS.values()), all(GCP_CREDS.values())]), "No environment credentials are set", ) - async def _setup_class(cls): - await super()._setup_class() - cls.listener = OvertCommandListener() - cls.client = await cls.unmanaged_async_rs_or_single_client(event_listeners=[cls.listener]) - await cls.client.db.coll.drop() - cls.vault = await create_key_vault(cls.client.keyvault.datakeys) + async def asyncSetUp(self): + await super().asyncSetUp() + self.listener = OvertCommandListener() + self.client = await self.async_rs_or_single_client(event_listeners=[self.listener]) + await self.client.db.coll.drop() + self.vault = await create_key_vault(self.client.keyvault.datakeys) # Configure the encrypted field via the local schema_map option. schemas = { @@ -846,25 +843,22 @@ async def _setup_class(cls): } } opts = AutoEncryptionOpts( - cls.KMS_PROVIDERS, "keyvault.datakeys", schema_map=schemas, kms_tls_options=KMS_TLS_OPTS + self.KMS_PROVIDERS, + "keyvault.datakeys", + schema_map=schemas, + kms_tls_options=KMS_TLS_OPTS, ) - cls.client_encrypted = await cls.unmanaged_async_rs_or_single_client( + self.client_encrypted = await self.async_rs_or_single_client( auto_encryption_opts=opts, uuidRepresentation="standard" ) - cls.client_encryption = cls.unmanaged_create_client_encryption( - cls.KMS_PROVIDERS, "keyvault.datakeys", cls.client, OPTS, kms_tls_options=KMS_TLS_OPTS + self.client_encryption = self.create_client_encryption( + self.KMS_PROVIDERS, "keyvault.datakeys", self.client, OPTS, kms_tls_options=KMS_TLS_OPTS ) - - @classmethod - async def _tearDown_class(cls): - await cls.vault.drop() - await cls.client.close() - await cls.client_encrypted.close() - await cls.client_encryption.close() - - def setUp(self): self.listener.reset() + async def asyncTearDown(self) -> None: + await self.vault.drop() + async def run_test(self, provider_name): # Create data key. 
master_key: Any = self.MASTER_KEYS[provider_name] @@ -1011,10 +1005,9 @@ async def test_views_are_prohibited(self): class TestCorpus(AsyncEncryptionIntegrationTest): - @classmethod @unittest.skipUnless(any(AWS_CREDS.values()), "AWS environment credentials are not set") - async def _setup_class(cls): - await super()._setup_class() + async def asyncSetUp(self): + await super().asyncSetUp() @staticmethod def kms_providers(): @@ -1188,12 +1181,11 @@ class TestBsonSizeBatches(AsyncEncryptionIntegrationTest): client_encrypted: AsyncMongoClient listener: OvertCommandListener - @classmethod - async def _setup_class(cls): - await super()._setup_class() + async def asyncSetUp(self): + await super().asyncSetUp() db = async_client_context.client.db - cls.coll = db.coll - await cls.coll.drop() + self.coll = db.coll + await self.coll.drop() # Configure the encrypted 'db.coll' collection via jsonSchema. json_schema = json_data("limits", "limits-schema.json") await db.create_collection( @@ -1211,17 +1203,14 @@ async def _setup_class(cls): await coll.insert_one(json_data("limits", "limits-key.json")) opts = AutoEncryptionOpts({"local": {"key": LOCAL_MASTER_KEY}}, "keyvault.datakeys") - cls.listener = OvertCommandListener() - cls.client_encrypted = await cls.unmanaged_async_rs_or_single_client( - auto_encryption_opts=opts, event_listeners=[cls.listener] + self.listener = OvertCommandListener() + self.client_encrypted = await self.async_rs_or_single_client( + auto_encryption_opts=opts, event_listeners=[self.listener] ) - cls.coll_encrypted = cls.client_encrypted.db.coll + self.coll_encrypted = self.client_encrypted.db.coll - @classmethod - async def _tearDown_class(cls): - await cls.coll_encrypted.drop() - await cls.client_encrypted.close() - await super()._tearDown_class() + async def asyncTearDown(self) -> None: + await self.coll_encrypted.drop() async def test_01_insert_succeeds_under_2MiB(self): doc = {"_id": "over_2mib_under_16mib", "unencrypted": "a" * _2_MiB} @@ -1245,7 +1234,9 @@ async def test_03_bulk_batch_split(self): doc2 = {"_id": "over_2mib_2", "unencrypted": "a" * _2_MiB} self.listener.reset() await self.coll_encrypted.bulk_write([InsertOne(doc1), InsertOne(doc2)]) - self.assertEqual(self.listener.started_command_names(), ["insert", "insert"]) + self.assertEqual( + len([c for c in self.listener.started_command_names() if c == "insert"]), 2 + ) async def test_04_bulk_batch_split(self): limits_doc = json_data("limits", "limits-doc.json") @@ -1255,7 +1246,9 @@ async def test_04_bulk_batch_split(self): doc2.update(limits_doc) self.listener.reset() await self.coll_encrypted.bulk_write([InsertOne(doc1), InsertOne(doc2)]) - self.assertEqual(self.listener.started_command_names(), ["insert", "insert"]) + self.assertEqual( + len([c for c in self.listener.started_command_names() if c == "insert"]), 2 + ) async def test_05_insert_succeeds_just_under_16MiB(self): doc = {"_id": "under_16mib", "unencrypted": "a" * (_16_MiB - 2000)} @@ -1285,15 +1278,12 @@ async def test_06_insert_fails_over_16MiB(self): class TestCustomEndpoint(AsyncEncryptionIntegrationTest): """Prose tests for creating data keys with a custom endpoint.""" - @classmethod @unittest.skipUnless( any([all(AWS_CREDS.values()), all(AZURE_CREDS.values()), all(GCP_CREDS.values())]), "No environment credentials are set", ) - async def _setup_class(cls): - await super()._setup_class() - - def setUp(self): + async def asyncSetUp(self): + await super().asyncSetUp() kms_providers = { "aws": AWS_CREDS, "azure": AZURE_CREDS, @@ -1322,10 +1312,6 @@ def 
setUp(self): self._kmip_host_error = None self._invalid_host_error = None - async def asyncTearDown(self): - await self.client_encryption.close() - await self.client_encryption_invalid.close() - async def run_test_expected_success(self, provider_name, master_key): data_key_id = await self.client_encryption.create_data_key( provider_name, master_key=master_key @@ -1500,18 +1486,18 @@ class AzureGCPEncryptionTestMixin(AsyncEncryptionIntegrationTest): KEYVAULT_COLL = "datakeys" client: AsyncMongoClient - async def asyncSetUp(self): + async def _setup(self): keyvault = self.client.get_database(self.KEYVAULT_DB).get_collection(self.KEYVAULT_COLL) await create_key_vault(keyvault, self.DEK) async def _test_explicit(self, expectation): + await self._setup() client_encryption = self.create_client_encryption( self.KMS_PROVIDER_MAP, # type: ignore[arg-type] ".".join([self.KEYVAULT_DB, self.KEYVAULT_COLL]), async_client_context.client, OPTS, ) - self.addAsyncCleanup(client_encryption.close) ciphertext = await client_encryption.encrypt( "string0", @@ -1523,6 +1509,7 @@ async def _test_explicit(self, expectation): self.assertEqual(await client_encryption.decrypt(ciphertext), "string0") async def _test_automatic(self, expectation_extjson, payload): + await self._setup() encrypted_db = "db" encrypted_coll = "coll" keyvault_namespace = ".".join([self.KEYVAULT_DB, self.KEYVAULT_COLL]) @@ -1537,7 +1524,6 @@ async def _test_automatic(self, expectation_extjson, payload): client = await self.async_rs_or_single_client( auto_encryption_opts=encryption_opts, event_listeners=[insert_listener] ) - self.addAsyncCleanup(client.aclose) coll = client.get_database(encrypted_db).get_collection( encrypted_coll, codec_options=OPTS, write_concern=WriteConcern("majority") @@ -1559,13 +1545,12 @@ async def _test_automatic(self, expectation_extjson, payload): class TestAzureEncryption(AzureGCPEncryptionTestMixin, AsyncEncryptionIntegrationTest): - @classmethod @unittest.skipUnless(any(AZURE_CREDS.values()), "Azure environment credentials are not set") - async def _setup_class(cls): - cls.KMS_PROVIDER_MAP = {"azure": AZURE_CREDS} - cls.DEK = json_data(BASE, "custom", "azure-dek.json") - cls.SCHEMA_MAP = json_data(BASE, "custom", "azure-gcp-schema.json") - await super()._setup_class() + async def asyncSetUp(self): + self.KMS_PROVIDER_MAP = {"azure": AZURE_CREDS} + self.DEK = json_data(BASE, "custom", "azure-dek.json") + self.SCHEMA_MAP = json_data(BASE, "custom", "azure-gcp-schema.json") + await super().asyncSetUp() async def test_explicit(self): return await self._test_explicit( @@ -1585,13 +1570,12 @@ async def test_automatic(self): class TestGCPEncryption(AzureGCPEncryptionTestMixin, AsyncEncryptionIntegrationTest): - @classmethod @unittest.skipUnless(any(GCP_CREDS.values()), "GCP environment credentials are not set") - async def _setup_class(cls): - cls.KMS_PROVIDER_MAP = {"gcp": GCP_CREDS} - cls.DEK = json_data(BASE, "custom", "gcp-dek.json") - cls.SCHEMA_MAP = json_data(BASE, "custom", "azure-gcp-schema.json") - await super()._setup_class() + async def asyncSetUp(self): + self.KMS_PROVIDER_MAP = {"gcp": GCP_CREDS} + self.DEK = json_data(BASE, "custom", "gcp-dek.json") + self.SCHEMA_MAP = json_data(BASE, "custom", "azure-gcp-schema.json") + await super().asyncSetUp() async def test_explicit(self): return await self._test_explicit( @@ -1613,6 +1597,7 @@ async def test_automatic(self): # https://github.com/mongodb/specifications/blob/master/source/client-side-encryption/tests/README.md#deadlock-tests class 
TestDeadlockProse(AsyncEncryptionIntegrationTest): async def asyncSetUp(self): + await super().asyncSetUp() self.client_test = await self.async_rs_or_single_client( maxPoolSize=1, readConcernLevel="majority", w="majority", uuidRepresentation="standard" ) @@ -1645,7 +1630,6 @@ async def asyncSetUp(self): self.ciphertext = await client_encryption.encrypt( "string0", Algorithm.AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic, key_alt_name="local" ) - await client_encryption.close() self.client_listener = OvertCommandListener() self.topology_listener = TopologyEventListener() @@ -1840,6 +1824,7 @@ async def test_case_8(self): # https://github.com/mongodb/specifications/blob/master/source/client-side-encryption/tests/README.md#14-decryption-events class TestDecryptProse(AsyncEncryptionIntegrationTest): async def asyncSetUp(self): + await super().asyncSetUp() self.client = async_client_context.client await self.client.db.drop_collection("decryption_events") await create_key_vault(self.client.keyvault.datakeys) @@ -2275,6 +2260,7 @@ async def test_06_named_kms_providers_apply_tls_options_kmip(self): # https://github.com/mongodb/specifications/blob/50e26fe/source/client-side-encryption/tests/README.md#unique-index-on-keyaltnames class TestUniqueIndexOnKeyAltNamesProse(AsyncEncryptionIntegrationTest): async def asyncSetUp(self): + await super().asyncSetUp() self.client = async_client_context.client await create_key_vault(self.client.keyvault.datakeys) kms_providers_map = {"local": {"key": LOCAL_MASTER_KEY}} @@ -2624,8 +2610,6 @@ async def AsyncMongoClient(**kwargs): assert isinstance(res["encrypted_indexed"], Binary) assert isinstance(res["encrypted_unindexed"], Binary) - await client_encryption.close() - # https://github.com/mongodb/specifications/blob/master/source/client-side-encryption/tests/README.md#22-range-explicit-encryption class TestRangeQueryProse(AsyncEncryptionIntegrationTest): @@ -3089,17 +3073,11 @@ class TestNoSessionsSupport(AsyncEncryptionIntegrationTest): mongocryptd_client: AsyncMongoClient MONGOCRYPTD_PORT = 27020 - @classmethod @unittest.skipIf(os.environ.get("TEST_CRYPT_SHARED"), "crypt_shared lib is installed") - async def _setup_class(cls): - await super()._setup_class() - start_mongocryptd(cls.MONGOCRYPTD_PORT) - - @classmethod - async def _tearDown_class(cls): - await super()._tearDown_class() - async def asyncSetUp(self) -> None: + await super().asyncSetUp() + start_mongocryptd(self.MONGOCRYPTD_PORT) + self.listener = OvertCommandListener() self.mongocryptd_client = self.simple_client( f"mongodb://localhost:{self.MONGOCRYPTD_PORT}", event_listeners=[self.listener] diff --git a/test/asynchronous/test_grid_file.py b/test/asynchronous/test_grid_file.py index 54fcd3abf6..affdacde91 100644 --- a/test/asynchronous/test_grid_file.py +++ b/test/asynchronous/test_grid_file.py @@ -97,6 +97,7 @@ def test_grid_in_custom_opts(self): class AsyncTestGridFile(AsyncIntegrationTest): async def asyncSetUp(self): + await super().asyncSetUp() await self.cleanup_colls(self.db.fs.files, self.db.fs.chunks) async def test_basic(self): diff --git a/test/asynchronous/test_locks.py b/test/asynchronous/test_locks.py index e0e7f2fc8d..e5a0adfee6 100644 --- a/test/asynchronous/test_locks.py +++ b/test/asynchronous/test_locks.py @@ -16,498 +16,447 @@ import asyncio import sys -import threading import unittest +from pymongo.lock import _async_create_condition, _async_create_lock + sys.path[0:0] = [""] -from pymongo.lock import _ACondition +if sys.version_info < (3, 13): + # Tests adapted from: 
https://github.com/python/cpython/blob/v3.13.0rc2/Lib/test/test_asyncio/test_locks.py + # Includes tests for: + # - https://github.com/python/cpython/issues/111693 + # - https://github.com/python/cpython/issues/112202 + class TestConditionStdlib(unittest.IsolatedAsyncioTestCase): + async def test_wait(self): + cond = _async_create_condition(_async_create_lock()) + result = [] + + async def c1(result): + await cond.acquire() + if await cond.wait(): + result.append(1) + return True + async def c2(result): + await cond.acquire() + if await cond.wait(): + result.append(2) + return True -# Tests adapted from: https://github.com/python/cpython/blob/v3.13.0rc2/Lib/test/test_asyncio/test_locks.py -# Includes tests for: -# - https://github.com/python/cpython/issues/111693 -# - https://github.com/python/cpython/issues/112202 -class TestConditionStdlib(unittest.IsolatedAsyncioTestCase): - async def test_wait(self): - cond = _ACondition(threading.Condition(threading.Lock())) - result = [] + async def c3(result): + await cond.acquire() + if await cond.wait(): + result.append(3) + return True - async def c1(result): - await cond.acquire() - if await cond.wait(): - result.append(1) - return True + t1 = asyncio.create_task(c1(result)) + t2 = asyncio.create_task(c2(result)) + t3 = asyncio.create_task(c3(result)) - async def c2(result): - await cond.acquire() - if await cond.wait(): - result.append(2) - return True + await asyncio.sleep(0) + self.assertEqual([], result) + self.assertFalse(cond.locked()) - async def c3(result): - await cond.acquire() - if await cond.wait(): - result.append(3) - return True - - t1 = asyncio.create_task(c1(result)) - t2 = asyncio.create_task(c2(result)) - t3 = asyncio.create_task(c3(result)) - - await asyncio.sleep(0) - self.assertEqual([], result) - self.assertFalse(cond.locked()) - - self.assertTrue(await cond.acquire()) - cond.notify() - await asyncio.sleep(0) - self.assertEqual([], result) - self.assertTrue(cond.locked()) - - cond.release() - await asyncio.sleep(0) - self.assertEqual([1], result) - self.assertTrue(cond.locked()) - - cond.notify(2) - await asyncio.sleep(0) - self.assertEqual([1], result) - self.assertTrue(cond.locked()) - - cond.release() - await asyncio.sleep(0) - self.assertEqual([1, 2], result) - self.assertTrue(cond.locked()) - - cond.release() - await asyncio.sleep(0) - self.assertEqual([1, 2, 3], result) - self.assertTrue(cond.locked()) - - self.assertTrue(t1.done()) - self.assertTrue(t1.result()) - self.assertTrue(t2.done()) - self.assertTrue(t2.result()) - self.assertTrue(t3.done()) - self.assertTrue(t3.result()) - - async def test_wait_cancel(self): - cond = _ACondition(threading.Condition(threading.Lock())) - await cond.acquire() - - wait = asyncio.create_task(cond.wait()) - asyncio.get_running_loop().call_soon(wait.cancel) - with self.assertRaises(asyncio.CancelledError): - await wait - self.assertFalse(cond._waiters) - self.assertTrue(cond.locked()) - - async def test_wait_cancel_contested(self): - cond = _ACondition(threading.Condition(threading.Lock())) - - await cond.acquire() - self.assertTrue(cond.locked()) - - wait_task = asyncio.create_task(cond.wait()) - await asyncio.sleep(0) - self.assertFalse(cond.locked()) - - # Notify, but contest the lock before cancelling - await cond.acquire() - self.assertTrue(cond.locked()) - cond.notify() - asyncio.get_running_loop().call_soon(wait_task.cancel) - asyncio.get_running_loop().call_soon(cond.release) - - try: - await wait_task - except asyncio.CancelledError: - # Should not happen, since no 
cancellation points - pass - - self.assertTrue(cond.locked()) - - async def test_wait_cancel_after_notify(self): - # See bpo-32841 - waited = False - - cond = _ACondition(threading.Condition(threading.Lock())) - - async def wait_on_cond(): - nonlocal waited - async with cond: - waited = True # Make sure this area was reached - await cond.wait() + self.assertTrue(await cond.acquire()) + cond.notify() + await asyncio.sleep(0) + self.assertEqual([], result) + self.assertTrue(cond.locked()) - waiter = asyncio.create_task(wait_on_cond()) - await asyncio.sleep(0) # Start waiting + cond.release() + await asyncio.sleep(0) + self.assertEqual([1], result) + self.assertTrue(cond.locked()) + + cond.notify(2) + await asyncio.sleep(0) + self.assertEqual([1], result) + self.assertTrue(cond.locked()) - await cond.acquire() - cond.notify() - await asyncio.sleep(0) # Get to acquire() - waiter.cancel() - await asyncio.sleep(0) # Activate cancellation - cond.release() - await asyncio.sleep(0) # Cancellation should occur + cond.release() + await asyncio.sleep(0) + self.assertEqual([1, 2], result) + self.assertTrue(cond.locked()) - self.assertTrue(waiter.cancelled()) - self.assertTrue(waited) + cond.release() + await asyncio.sleep(0) + self.assertEqual([1, 2, 3], result) + self.assertTrue(cond.locked()) - async def test_wait_unacquired(self): - cond = _ACondition(threading.Condition(threading.Lock())) - with self.assertRaises(RuntimeError): - await cond.wait() + self.assertTrue(t1.done()) + self.assertTrue(t1.result()) + self.assertTrue(t2.done()) + self.assertTrue(t2.result()) + self.assertTrue(t3.done()) + self.assertTrue(t3.result()) - async def test_wait_for(self): - cond = _ACondition(threading.Condition(threading.Lock())) - presult = False + async def test_wait_cancel(self): + cond = _async_create_condition(_async_create_lock()) + await cond.acquire() - def predicate(): - return presult + wait = asyncio.create_task(cond.wait()) + asyncio.get_running_loop().call_soon(wait.cancel) + with self.assertRaises(asyncio.CancelledError): + await wait + self.assertFalse(cond._waiters) + self.assertTrue(cond.locked()) - result = [] + async def test_wait_cancel_contested(self): + cond = _async_create_condition(_async_create_lock()) - async def c1(result): await cond.acquire() - if await cond.wait_for(predicate): - result.append(1) - cond.release() - return True + self.assertTrue(cond.locked()) - t = asyncio.create_task(c1(result)) + wait_task = asyncio.create_task(cond.wait()) + await asyncio.sleep(0) + self.assertFalse(cond.locked()) - await asyncio.sleep(0) - self.assertEqual([], result) + # Notify, but contest the lock before cancelling + await cond.acquire() + self.assertTrue(cond.locked()) + cond.notify() + asyncio.get_running_loop().call_soon(wait_task.cancel) + asyncio.get_running_loop().call_soon(cond.release) - await cond.acquire() - cond.notify() - cond.release() - await asyncio.sleep(0) - self.assertEqual([], result) + try: + await wait_task + except asyncio.CancelledError: + # Should not happen, since no cancellation points + pass - presult = True - await cond.acquire() - cond.notify() - cond.release() - await asyncio.sleep(0) - self.assertEqual([1], result) + self.assertTrue(cond.locked()) - self.assertTrue(t.done()) - self.assertTrue(t.result()) + async def test_wait_cancel_after_notify(self): + # See bpo-32841 + waited = False - async def test_wait_for_unacquired(self): - cond = _ACondition(threading.Condition(threading.Lock())) + cond = _async_create_condition(_async_create_lock()) - # predicate can 
return true immediately - res = await cond.wait_for(lambda: [1, 2, 3]) - self.assertEqual([1, 2, 3], res) + async def wait_on_cond(): + nonlocal waited + async with cond: + waited = True # Make sure this area was reached + await cond.wait() - with self.assertRaises(RuntimeError): - await cond.wait_for(lambda: False) + waiter = asyncio.create_task(wait_on_cond()) + await asyncio.sleep(0) # Start waiting - async def test_notify(self): - cond = _ACondition(threading.Condition(threading.Lock())) - result = [] + await cond.acquire() + cond.notify() + await asyncio.sleep(0) # Get to acquire() + waiter.cancel() + await asyncio.sleep(0) # Activate cancellation + cond.release() + await asyncio.sleep(0) # Cancellation should occur + + self.assertTrue(waiter.cancelled()) + self.assertTrue(waited) + + async def test_wait_unacquired(self): + cond = _async_create_condition(_async_create_lock()) + with self.assertRaises(RuntimeError): + await cond.wait() - async def c1(result): - async with cond: - if await cond.wait(): - result.append(1) - return True + async def test_wait_for(self): + cond = _async_create_condition(_async_create_lock()) + presult = False - async def c2(result): - async with cond: - if await cond.wait(): - result.append(2) - return True + def predicate(): + return presult - async def c3(result): - async with cond: - if await cond.wait(): - result.append(3) + result = [] + + async def c1(result): + await cond.acquire() + if await cond.wait_for(predicate): + result.append(1) + cond.release() return True - t1 = asyncio.create_task(c1(result)) - t2 = asyncio.create_task(c2(result)) - t3 = asyncio.create_task(c3(result)) + t = asyncio.create_task(c1(result)) - await asyncio.sleep(0) - self.assertEqual([], result) + await asyncio.sleep(0) + self.assertEqual([], result) - async with cond: - cond.notify(1) - await asyncio.sleep(1) - self.assertEqual([1], result) + await cond.acquire() + cond.notify() + cond.release() + await asyncio.sleep(0) + self.assertEqual([], result) - async with cond: - cond.notify(1) - cond.notify(2048) - await asyncio.sleep(1) - self.assertEqual([1, 2, 3], result) + presult = True + await cond.acquire() + cond.notify() + cond.release() + await asyncio.sleep(0) + self.assertEqual([1], result) - self.assertTrue(t1.done()) - self.assertTrue(t1.result()) - self.assertTrue(t2.done()) - self.assertTrue(t2.result()) - self.assertTrue(t3.done()) - self.assertTrue(t3.result()) + self.assertTrue(t.done()) + self.assertTrue(t.result()) - async def test_notify_all(self): - cond = _ACondition(threading.Condition(threading.Lock())) + async def test_wait_for_unacquired(self): + cond = _async_create_condition(_async_create_lock()) - result = [] + # predicate can return true immediately + res = await cond.wait_for(lambda: [1, 2, 3]) + self.assertEqual([1, 2, 3], res) - async def c1(result): - async with cond: - if await cond.wait(): - result.append(1) - return True + with self.assertRaises(RuntimeError): + await cond.wait_for(lambda: False) - async def c2(result): - async with cond: - if await cond.wait(): - result.append(2) - return True + async def test_notify(self): + cond = _async_create_condition(_async_create_lock()) + result = [] - t1 = asyncio.create_task(c1(result)) - t2 = asyncio.create_task(c2(result)) + async def c1(result): + async with cond: + if await cond.wait(): + result.append(1) + return True - await asyncio.sleep(0) - self.assertEqual([], result) + async def c2(result): + async with cond: + if await cond.wait(): + result.append(2) + return True - async with cond: - 
cond.notify_all() - await asyncio.sleep(1) - self.assertEqual([1, 2], result) + async def c3(result): + async with cond: + if await cond.wait(): + result.append(3) + return True - self.assertTrue(t1.done()) - self.assertTrue(t1.result()) - self.assertTrue(t2.done()) - self.assertTrue(t2.result()) + t1 = asyncio.create_task(c1(result)) + t2 = asyncio.create_task(c2(result)) + t3 = asyncio.create_task(c3(result)) - async def test_context_manager(self): - cond = _ACondition(threading.Condition(threading.Lock())) - self.assertFalse(cond.locked()) - async with cond: - self.assertTrue(cond.locked()) - self.assertFalse(cond.locked()) - - async def test_timeout_in_block(self): - condition = _ACondition(threading.Condition(threading.Lock())) - async with condition: - with self.assertRaises(asyncio.TimeoutError): - await asyncio.wait_for(condition.wait(), timeout=0.5) - - @unittest.skipIf( - sys.version_info < (3, 11), "raising the same cancelled error requires Python>=3.11" - ) - async def test_cancelled_error_wakeup(self): - # Test that a cancelled error, received when awaiting wakeup, - # will be re-raised un-modified. - wake = False - raised = None - cond = _ACondition(threading.Condition(threading.Lock())) - - async def func(): - nonlocal raised - async with cond: - with self.assertRaises(asyncio.CancelledError) as err: - await cond.wait_for(lambda: wake) - raised = err.exception - raise raised - - task = asyncio.create_task(func()) - await asyncio.sleep(0) - # Task is waiting on the condition, cancel it there. - task.cancel(msg="foo") # type: ignore[call-arg] - with self.assertRaises(asyncio.CancelledError) as err: - await task - self.assertEqual(err.exception.args, ("foo",)) - # We should have got the _same_ exception instance as the one - # originally raised. - self.assertIs(err.exception, raised) - - @unittest.skipIf( - sys.version_info < (3, 11), "raising the same cancelled error requires Python>=3.11" - ) - async def test_cancelled_error_re_aquire(self): - # Test that a cancelled error, received when re-aquiring lock, - # will be re-raised un-modified. - wake = False - raised = None - cond = _ACondition(threading.Condition(threading.Lock())) - - async def func(): - nonlocal raised - async with cond: - with self.assertRaises(asyncio.CancelledError) as err: - await cond.wait_for(lambda: wake) - raised = err.exception - raise raised - - task = asyncio.create_task(func()) - await asyncio.sleep(0) - # Task is waiting on the condition - await cond.acquire() - wake = True - cond.notify() - await asyncio.sleep(0) - # Task is now trying to re-acquire the lock, cancel it there. - task.cancel(msg="foo") # type: ignore[call-arg] - cond.release() - with self.assertRaises(asyncio.CancelledError) as err: - await task - self.assertEqual(err.exception.args, ("foo",)) - # We should have got the _same_ exception instance as the one - # originally raised. - self.assertIs(err.exception, raised) - - @unittest.skipIf(sys.version_info < (3, 11), "asyncio.timeout requires Python>=3.11") - async def test_cancelled_wakeup(self): - # Test that a task cancelled at the "same" time as it is woken - # up as part of a Condition.notify() does not result in a lost wakeup. - # This test simulates a cancel while the target task is awaiting initial - # wakeup on the wakeup queue. 
- condition = _ACondition(threading.Condition(threading.Lock())) - state = 0 - - async def consumer(): - nonlocal state - async with condition: - while True: - await condition.wait_for(lambda: state != 0) - if state < 0: - return - state -= 1 - - # create two consumers - c = [asyncio.create_task(consumer()) for _ in range(2)] - # wait for them to settle - await asyncio.sleep(0.1) - async with condition: - # produce one item and wake up one - state += 1 - condition.notify(1) - - # Cancel it while it is awaiting to be run. - # This cancellation could come from the outside - c[0].cancel() - - # now wait for the item to be consumed - # if it doesn't means that our "notify" didn"t take hold. - # because it raced with a cancel() - try: - async with asyncio.timeout(1): - await condition.wait_for(lambda: state == 0) - except TimeoutError: - pass - self.assertEqual(state, 0) - - # clean up - state = -1 - condition.notify_all() - await c[1] - - @unittest.skipIf(sys.version_info < (3, 11), "asyncio.timeout requires Python>=3.11") - async def test_cancelled_wakeup_relock(self): - # Test that a task cancelled at the "same" time as it is woken - # up as part of a Condition.notify() does not result in a lost wakeup. - # This test simulates a cancel while the target task is acquiring the lock - # again. - condition = _ACondition(threading.Condition(threading.Lock())) - state = 0 - - async def consumer(): - nonlocal state - async with condition: - while True: - await condition.wait_for(lambda: state != 0) - if state < 0: - return - state -= 1 - - # create two consumers - c = [asyncio.create_task(consumer()) for _ in range(2)] - # wait for them to settle - await asyncio.sleep(0.1) - async with condition: - # produce one item and wake up one - state += 1 - condition.notify(1) - - # now we sleep for a bit. This allows the target task to wake up and - # settle on re-aquiring the lock await asyncio.sleep(0) + self.assertEqual([], result) - # Cancel it while awaiting the lock - # This cancel could come the outside. - c[0].cancel() + async with cond: + cond.notify(1) + await asyncio.sleep(1) + self.assertEqual([1], result) - # now wait for the item to be consumed - # if it doesn't means that our "notify" didn"t take hold. 
- # because it raced with a cancel() - try: - async with asyncio.timeout(1): - await condition.wait_for(lambda: state == 0) - except TimeoutError: - pass - self.assertEqual(state, 0) + async with cond: + cond.notify(1) + cond.notify(2048) + await asyncio.sleep(1) + self.assertEqual([1, 2, 3], result) - # clean up - state = -1 - condition.notify_all() - await c[1] + self.assertTrue(t1.done()) + self.assertTrue(t1.result()) + self.assertTrue(t2.done()) + self.assertTrue(t2.result()) + self.assertTrue(t3.done()) + self.assertTrue(t3.result()) + async def test_notify_all(self): + cond = _async_create_condition(_async_create_lock()) -class TestCondition(unittest.IsolatedAsyncioTestCase): - async def test_multiple_loops_notify(self): - cond = _ACondition(threading.Condition(threading.Lock())) + result = [] - def tmain(cond): - async def atmain(cond): - await asyncio.sleep(1) + async def c1(result): async with cond: - cond.notify(1) - - asyncio.run(atmain(cond)) - - t = threading.Thread(target=tmain, args=(cond,)) - t.start() + if await cond.wait(): + result.append(1) + return True - async with cond: - self.assertTrue(await cond.wait(30)) - t.join() - - async def test_multiple_loops_notify_all(self): - cond = _ACondition(threading.Condition(threading.Lock())) - results = [] - - def tmain(cond, results): - async def atmain(cond, results): - await asyncio.sleep(1) + async def c2(result): async with cond: - res = await cond.wait(30) - results.append(res) - - asyncio.run(atmain(cond, results)) + if await cond.wait(): + result.append(2) + return True - nthreads = 5 - threads = [] - for _ in range(nthreads): - threads.append(threading.Thread(target=tmain, args=(cond, results))) - for t in threads: - t.start() + t1 = asyncio.create_task(c1(result)) + t2 = asyncio.create_task(c2(result)) - await asyncio.sleep(2) - async with cond: - cond.notify_all() + await asyncio.sleep(0) + self.assertEqual([], result) - for t in threads: - t.join() + async with cond: + cond.notify_all() + await asyncio.sleep(1) + self.assertEqual([1, 2], result) + + self.assertTrue(t1.done()) + self.assertTrue(t1.result()) + self.assertTrue(t2.done()) + self.assertTrue(t2.result()) + + async def test_context_manager(self): + cond = _async_create_condition(_async_create_lock()) + self.assertFalse(cond.locked()) + async with cond: + self.assertTrue(cond.locked()) + self.assertFalse(cond.locked()) - self.assertEqual(results, [True] * nthreads) + async def test_timeout_in_block(self): + condition = _async_create_condition(_async_create_lock()) + async with condition: + with self.assertRaises(asyncio.TimeoutError): + await asyncio.wait_for(condition.wait(), timeout=0.5) + + @unittest.skipIf( + sys.version_info < (3, 11), "raising the same cancelled error requires Python>=3.11" + ) + async def test_cancelled_error_wakeup(self): + # Test that a cancelled error, received when awaiting wakeup, + # will be re-raised un-modified. + wake = False + raised = None + cond = _async_create_condition(_async_create_lock()) + + async def func(): + nonlocal raised + async with cond: + with self.assertRaises(asyncio.CancelledError) as err: + await cond.wait_for(lambda: wake) + raised = err.exception + raise raised + task = asyncio.create_task(func()) + await asyncio.sleep(0) + # Task is waiting on the condition, cancel it there. 
+ task.cancel(msg="foo") # type: ignore[call-arg] + with self.assertRaises(asyncio.CancelledError) as err: + await task + self.assertEqual(err.exception.args, ("foo",)) + # We should have got the _same_ exception instance as the one + # originally raised. + self.assertIs(err.exception, raised) + + @unittest.skipIf( + sys.version_info < (3, 11), "raising the same cancelled error requires Python>=3.11" + ) + async def test_cancelled_error_re_aquire(self): + # Test that a cancelled error, received when re-aquiring lock, + # will be re-raised un-modified. + wake = False + raised = None + cond = _async_create_condition(_async_create_lock()) + + async def func(): + nonlocal raised + async with cond: + with self.assertRaises(asyncio.CancelledError) as err: + await cond.wait_for(lambda: wake) + raised = err.exception + raise raised -if __name__ == "__main__": - unittest.main() + task = asyncio.create_task(func()) + await asyncio.sleep(0) + # Task is waiting on the condition + await cond.acquire() + wake = True + cond.notify() + await asyncio.sleep(0) + # Task is now trying to re-acquire the lock, cancel it there. + task.cancel(msg="foo") # type: ignore[call-arg] + cond.release() + with self.assertRaises(asyncio.CancelledError) as err: + await task + self.assertEqual(err.exception.args, ("foo",)) + # We should have got the _same_ exception instance as the one + # originally raised. + self.assertIs(err.exception, raised) + + @unittest.skipIf(sys.version_info < (3, 11), "asyncio.timeout requires Python>=3.11") + async def test_cancelled_wakeup(self): + # Test that a task cancelled at the "same" time as it is woken + # up as part of a Condition.notify() does not result in a lost wakeup. + # This test simulates a cancel while the target task is awaiting initial + # wakeup on the wakeup queue. + condition = _async_create_condition(_async_create_lock()) + state = 0 + + async def consumer(): + nonlocal state + async with condition: + while True: + await condition.wait_for(lambda: state != 0) + if state < 0: + return + state -= 1 + + # create two consumers + c = [asyncio.create_task(consumer()) for _ in range(2)] + # wait for them to settle + await asyncio.sleep(0.1) + async with condition: + # produce one item and wake up one + state += 1 + condition.notify(1) + + # Cancel it while it is awaiting to be run. + # This cancellation could come from the outside + c[0].cancel() + + # now wait for the item to be consumed + # if it doesn't means that our "notify" didn"t take hold. + # because it raced with a cancel() + try: + async with asyncio.timeout(1): + await condition.wait_for(lambda: state == 0) + except TimeoutError: + pass + self.assertEqual(state, 0) + + # clean up + state = -1 + condition.notify_all() + await c[1] + + @unittest.skipIf(sys.version_info < (3, 11), "asyncio.timeout requires Python>=3.11") + async def test_cancelled_wakeup_relock(self): + # Test that a task cancelled at the "same" time as it is woken + # up as part of a Condition.notify() does not result in a lost wakeup. + # This test simulates a cancel while the target task is acquiring the lock + # again. 
+ condition = _async_create_condition(_async_create_lock()) + state = 0 + + async def consumer(): + nonlocal state + async with condition: + while True: + await condition.wait_for(lambda: state != 0) + if state < 0: + return + state -= 1 + + # create two consumers + c = [asyncio.create_task(consumer()) for _ in range(2)] + # wait for them to settle + await asyncio.sleep(0.1) + async with condition: + # produce one item and wake up one + state += 1 + condition.notify(1) + + # now we sleep for a bit. This allows the target task to wake up and + # settle on re-aquiring the lock + await asyncio.sleep(0) + + # Cancel it while awaiting the lock + # This cancel could come the outside. + c[0].cancel() + + # now wait for the item to be consumed + # if it doesn't means that our "notify" didn"t take hold. + # because it raced with a cancel() + try: + async with asyncio.timeout(1): + await condition.wait_for(lambda: state == 0) + except TimeoutError: + pass + self.assertEqual(state, 0) + + # clean up + state = -1 + condition.notify_all() + await c[1] + + if __name__ == "__main__": + unittest.main() diff --git a/test/asynchronous/test_monitoring.py b/test/asynchronous/test_monitoring.py index b0c86ab54e..eaad60beac 100644 --- a/test/asynchronous/test_monitoring.py +++ b/test/asynchronous/test_monitoring.py @@ -52,22 +52,16 @@ class AsyncTestCommandMonitoring(AsyncIntegrationTest): listener: EventListener @classmethod - @async_client_context.require_connection - async def _setup_class(cls): - await super()._setup_class() + def setUpClass(cls) -> None: cls.listener = OvertCommandListener() - cls.client = await cls.unmanaged_async_rs_or_single_client( - event_listeners=[cls.listener], retryWrites=False - ) - @classmethod - async def _tearDown_class(cls): - await cls.client.close() - await super()._tearDown_class() - - async def asyncTearDown(self): + @async_client_context.require_connection + async def asyncSetUp(self) -> None: + await super().asyncSetUp() self.listener.reset() - await super().asyncTearDown() + self.client = await self.async_rs_or_single_client( + event_listeners=[self.listener], retryWrites=False + ) async def test_started_simple(self): await self.client.pymongo_test.command("ping") @@ -1140,26 +1134,23 @@ class AsyncTestGlobalListener(AsyncIntegrationTest): saved_listeners: Any @classmethod - @async_client_context.require_connection - async def _setup_class(cls): - await super()._setup_class() + def setUpClass(cls) -> None: cls.listener = OvertCommandListener() # We plan to call register(), which internally modifies _LISTENERS. cls.saved_listeners = copy.deepcopy(monitoring._LISTENERS) monitoring.register(cls.listener) - cls.client = await cls.unmanaged_async_single_client() - # Get one (authenticated) socket in the pool. - await cls.client.pymongo_test.command("ping") - - @classmethod - async def _tearDown_class(cls): - monitoring._LISTENERS = cls.saved_listeners - await cls.client.close() - await super()._tearDown_class() + @async_client_context.require_connection async def asyncSetUp(self): await super().asyncSetUp() self.listener.reset() + self.client = await self.async_single_client() + # Get one (authenticated) socket in the pool. 
+ await self.client.pymongo_test.command("ping") + + @classmethod + def tearDownClass(cls): + monitoring._LISTENERS = cls.saved_listeners async def test_simple(self): await self.client.pymongo_test.command("ping") diff --git a/test/asynchronous/test_retryable_writes.py b/test/asynchronous/test_retryable_writes.py index ca2f0a5422..738ce04192 100644 --- a/test/asynchronous/test_retryable_writes.py +++ b/test/asynchronous/test_retryable_writes.py @@ -132,34 +132,27 @@ class IgnoreDeprecationsTest(AsyncIntegrationTest): RUN_ON_SERVERLESS = True deprecation_filter: DeprecationFilter - @classmethod - async def _setup_class(cls): - await super()._setup_class() - cls.deprecation_filter = DeprecationFilter() + async def asyncSetUp(self) -> None: + await super().asyncSetUp() + self.deprecation_filter = DeprecationFilter() - @classmethod - async def _tearDown_class(cls): - cls.deprecation_filter.stop() - await super()._tearDown_class() + async def asyncTearDown(self) -> None: + self.deprecation_filter.stop() class TestRetryableWritesMMAPv1(IgnoreDeprecationsTest): knobs: client_knobs - @classmethod - async def _setup_class(cls): - await super()._setup_class() + async def asyncSetUp(self) -> None: + await super().asyncSetUp() # Speed up the tests by decreasing the heartbeat frequency. - cls.knobs = client_knobs(heartbeat_frequency=0.1, min_heartbeat_interval=0.1) - cls.knobs.enable() - cls.client = await cls.unmanaged_async_rs_or_single_client(retryWrites=True) - cls.db = cls.client.pymongo_test + self.knobs = client_knobs(heartbeat_frequency=0.1, min_heartbeat_interval=0.1) + self.knobs.enable() + self.client = await self.async_rs_or_single_client(retryWrites=True) + self.db = self.client.pymongo_test - @classmethod - async def _tearDown_class(cls): - cls.knobs.disable() - await cls.client.close() - await super()._tearDown_class() + async def asyncTearDown(self) -> None: + self.knobs.disable() @async_client_context.require_no_standalone async def test_actionable_error_message(self): @@ -180,26 +173,18 @@ class TestRetryableWrites(IgnoreDeprecationsTest): listener: OvertCommandListener knobs: client_knobs - @classmethod @async_client_context.require_no_mmap - async def _setup_class(cls): - await super()._setup_class() + async def asyncSetUp(self) -> None: + await super().asyncSetUp() # Speed up the tests by decreasing the heartbeat frequency. 
- cls.knobs = client_knobs(heartbeat_frequency=0.1, min_heartbeat_interval=0.1) - cls.knobs.enable() - cls.listener = OvertCommandListener() - cls.client = await cls.unmanaged_async_rs_or_single_client( - retryWrites=True, event_listeners=[cls.listener] + self.knobs = client_knobs(heartbeat_frequency=0.1, min_heartbeat_interval=0.1) + self.knobs.enable() + self.listener = OvertCommandListener() + self.client = await self.async_rs_or_single_client( + retryWrites=True, event_listeners=[self.listener] ) - cls.db = cls.client.pymongo_test + self.db = self.client.pymongo_test - @classmethod - async def _tearDown_class(cls): - cls.knobs.disable() - await cls.client.close() - await super()._tearDown_class() - - async def asyncSetUp(self): if async_client_context.is_rs and async_client_context.test_commands_enabled: await self.client.admin.command( SON([("configureFailPoint", "onPrimaryTransactionalWrite"), ("mode", "alwaysOn")]) @@ -210,6 +195,7 @@ async def asyncTearDown(self): await self.client.admin.command( SON([("configureFailPoint", "onPrimaryTransactionalWrite"), ("mode", "off")]) ) + self.knobs.disable() async def test_supported_single_statement_no_retry(self): listener = OvertCommandListener() @@ -438,13 +424,12 @@ class TestWriteConcernError(AsyncIntegrationTest): RUN_ON_SERVERLESS = True fail_insert: dict - @classmethod @async_client_context.require_replica_set @async_client_context.require_no_mmap @async_client_context.require_failCommand_fail_point - async def _setup_class(cls): - await super()._setup_class() - cls.fail_insert = { + async def asyncSetUp(self) -> None: + await super().asyncSetUp() + self.fail_insert = { "configureFailPoint": "failCommand", "mode": {"times": 2}, "data": { diff --git a/test/asynchronous/test_session.py b/test/asynchronous/test_session.py index b432621798..42bc253b56 100644 --- a/test/asynchronous/test_session.py +++ b/test/asynchronous/test_session.py @@ -38,7 +38,6 @@ ExceptionCatchingThread, OvertCommandListener, async_wait_until, - wait_until, ) from bson import DBRef @@ -83,36 +82,27 @@ class TestSession(AsyncIntegrationTest): client2: AsyncMongoClient sensitive_commands: Set[str] - @classmethod @async_client_context.require_sessions - async def _setup_class(cls): - await super()._setup_class() + async def asyncSetUp(self): + await super().asyncSetUp() # Create a second client so we can make sure clients cannot share # sessions. - cls.client2 = await cls.unmanaged_async_rs_or_single_client() + self.client2 = await self.async_rs_or_single_client() # Redact no commands, so we can test user-admin commands have "lsid". 
- cls.sensitive_commands = monitoring._SENSITIVE_COMMANDS.copy() + self.sensitive_commands = monitoring._SENSITIVE_COMMANDS.copy() monitoring._SENSITIVE_COMMANDS.clear() - @classmethod - async def _tearDown_class(cls): - monitoring._SENSITIVE_COMMANDS.update(cls.sensitive_commands) - await cls.client2.close() - await super()._tearDown_class() - - async def asyncSetUp(self): self.listener = SessionTestListener() self.session_checker_listener = SessionTestListener() self.client = await self.async_rs_or_single_client( event_listeners=[self.listener, self.session_checker_listener] ) - self.addAsyncCleanup(self.client.close) self.db = self.client.pymongo_test self.initial_lsids = {s["id"] for s in session_ids(self.client)} async def asyncTearDown(self): - """All sessions used in the test must be returned to the pool.""" + monitoring._SENSITIVE_COMMANDS.update(self.sensitive_commands) await self.client.drop_database("pymongo_test") used_lsids = self.initial_lsids.copy() for event in self.session_checker_listener.started_events: @@ -122,6 +112,8 @@ async def asyncTearDown(self): current_lsids = {s["id"] for s in session_ids(self.client)} self.assertLessEqual(used_lsids, current_lsids) + await super().asyncTearDown() + async def _test_ops(self, client, *ops): listener = client.options.event_listeners[0] @@ -833,18 +825,11 @@ class TestCausalConsistency(AsyncUnitTest): listener: SessionTestListener client: AsyncMongoClient - @classmethod - async def _setup_class(cls): - cls.listener = SessionTestListener() - cls.client = await cls.unmanaged_async_rs_or_single_client(event_listeners=[cls.listener]) - - @classmethod - async def _tearDown_class(cls): - await cls.client.close() - @async_client_context.require_sessions async def asyncSetUp(self): await super().asyncSetUp() + self.listener = SessionTestListener() + self.client = await self.async_rs_or_single_client(event_listeners=[self.listener]) @async_client_context.require_no_standalone async def test_core(self): diff --git a/test/asynchronous/test_transactions.py b/test/asynchronous/test_transactions.py index b5d0686417..d11d0a9776 100644 --- a/test/asynchronous/test_transactions.py +++ b/test/asynchronous/test_transactions.py @@ -26,7 +26,7 @@ from test.asynchronous import AsyncIntegrationTest, async_client_context, unittest from test.utils import ( OvertCommandListener, - wait_until, + async_wait_until, ) from typing import List @@ -162,7 +162,7 @@ async def test_unpin_for_next_transaction(self): client = await self.async_rs_client( async_client_context.mongos_seeds(), localThresholdMS=1000 ) - wait_until(lambda: len(client.nodes) > 1, "discover both mongoses") + await async_wait_until(lambda: len(client.nodes) > 1, "discover both mongoses") coll = client.test.test # Create the collection. await coll.insert_one({}) @@ -191,7 +191,7 @@ async def test_unpin_for_non_transaction_operation(self): client = await self.async_rs_client( async_client_context.mongos_seeds(), localThresholdMS=1000 ) - wait_until(lambda: len(client.nodes) > 1, "discover both mongoses") + await async_wait_until(lambda: len(client.nodes) > 1, "discover both mongoses") coll = client.test.test # Create the collection. 
await coll.insert_one({}) @@ -403,21 +403,12 @@ def __exit__(self, exc_type, exc_val, exc_tb): class TestTransactionsConvenientAPI(AsyncTransactionsBase): - @classmethod - async def _setup_class(cls): - await super()._setup_class() - cls.mongos_clients = [] + async def asyncSetUp(self) -> None: + await super().asyncSetUp() + self.mongos_clients = [] if async_client_context.supports_transactions(): for address in async_client_context.mongoses: - cls.mongos_clients.append( - await cls.unmanaged_async_single_client("{}:{}".format(*address)) - ) - - @classmethod - async def _tearDown_class(cls): - for client in cls.mongos_clients: - await client.close() - await super()._tearDown_class() + self.mongos_clients.append(await self.async_single_client("{}:{}".format(*address))) async def _set_fail_point(self, client, command_args): cmd = {"configureFailPoint": "failCommand"} diff --git a/test/asynchronous/unified_format.py b/test/asynchronous/unified_format.py index db5ed81e24..b18b09383e 100644 --- a/test/asynchronous/unified_format.py +++ b/test/asynchronous/unified_format.py @@ -50,6 +50,7 @@ ) from test.utils import ( async_get_pool, + async_wait_until, camel_to_snake, camel_to_snake_args, parse_spec_options, @@ -304,7 +305,6 @@ async def _create_entity(self, entity_spec, uri=None): kwargs["h"] = uri client = await self.test.async_rs_or_single_client(**kwargs) self[spec["id"]] = client - self.test.addAsyncCleanup(client.close) return elif entity_type == "database": client = self[spec["client"]] @@ -479,54 +479,47 @@ async def insert_initial_data(self, initial_data): await db.create_collection(coll_name, write_concern=wc, **opts) @classmethod - async def _setup_class(cls): + def setUpClass(cls) -> None: + # Speed up the tests by decreasing the heartbeat frequency. + cls.knobs = client_knobs( + heartbeat_frequency=0.1, + min_heartbeat_interval=0.1, + kill_cursor_frequency=0.1, + events_queue_frequency=0.1, + ) + cls.knobs.enable() + + @classmethod + def tearDownClass(cls) -> None: + cls.knobs.disable() + + async def asyncSetUp(self): # super call creates internal client cls.client - await super()._setup_class() + await super().asyncSetUp() # process file-level runOnRequirements - run_on_spec = cls.TEST_SPEC.get("runOnRequirements", []) - if not await cls.should_run_on(run_on_spec): - raise unittest.SkipTest(f"{cls.__name__} runOnRequirements not satisfied") + run_on_spec = self.TEST_SPEC.get("runOnRequirements", []) + if not await self.should_run_on(run_on_spec): + raise unittest.SkipTest(f"{self.__class__.__name__} runOnRequirements not satisfied") # add any special-casing for skipping tests here if async_client_context.storage_engine == "mmapv1": - if "retryable-writes" in cls.TEST_SPEC["description"] or "retryable_writes" in str( - cls.TEST_PATH + if "retryable-writes" in self.TEST_SPEC["description"] or "retryable_writes" in str( + self.TEST_PATH ): raise unittest.SkipTest("MMAPv1 does not support retryWrites=True") # Handle mongos_clients for transactions tests. - cls.mongos_clients = [] + self.mongos_clients = [] if ( async_client_context.supports_transactions() and not async_client_context.load_balancer and not async_client_context.serverless ): for address in async_client_context.mongoses: - cls.mongos_clients.append( - await cls.unmanaged_async_single_client("{}:{}".format(*address)) - ) + self.mongos_clients.append(await self.async_single_client("{}:{}".format(*address))) - # Speed up the tests by decreasing the heartbeat frequency. 
- cls.knobs = client_knobs( - heartbeat_frequency=0.1, - min_heartbeat_interval=0.1, - kill_cursor_frequency=0.1, - events_queue_frequency=0.1, - ) - cls.knobs.enable() - - @classmethod - async def _tearDown_class(cls): - cls.knobs.disable() - for client in cls.mongos_clients: - await client.close() - await super()._tearDown_class() - - async def asyncSetUp(self): - await super().asyncSetUp() # process schemaVersion # note: we check major schema version during class generation - # note: we do this here because we cannot run assertions in setUpClass version = Version.from_string(self.TEST_SPEC["schemaVersion"]) self.assertLessEqual( version, @@ -1036,7 +1029,6 @@ async def _testOperation_targetedFailPoint(self, spec): ) client = await self.async_single_client("{}:{}".format(*session._pinned_address)) - self.addAsyncCleanup(client.close) await self.__set_fail_point(client=client, command_args=spec["failPoint"]) async def _testOperation_createEntities(self, spec): @@ -1137,13 +1129,13 @@ def _testOperation_assertEventCount(self, spec): client, event, count = spec["client"], spec["event"], spec["count"] self.assertEqual(self._event_count(client, event), count, f"expected {count} not {event!r}") - def _testOperation_waitForEvent(self, spec): + async def _testOperation_waitForEvent(self, spec): """Run the waitForEvent test operation. Wait for a number of events to be published, or fail. """ client, event, count = spec["client"], spec["event"], spec["count"] - wait_until( + await async_wait_until( lambda: self._event_count(client, event) >= count, f"find {count} {event} event(s)", ) diff --git a/test/asynchronous/utils_spec_runner.py b/test/asynchronous/utils_spec_runner.py index f27f52ec2c..b79e5258b5 100644 --- a/test/asynchronous/utils_spec_runner.py +++ b/test/asynchronous/utils_spec_runner.py @@ -249,30 +249,22 @@ class AsyncSpecRunner(AsyncIntegrationTest): knobs: client_knobs listener: EventListener - @classmethod - async def _setup_class(cls): - await super()._setup_class() - cls.mongos_clients = [] + async def asyncSetUp(self) -> None: + await super().asyncSetUp() + self.mongos_clients = [] # Speed up the tests by decreasing the heartbeat frequency. - cls.knobs = client_knobs(heartbeat_frequency=0.1, min_heartbeat_interval=0.1) - cls.knobs.enable() - - @classmethod - async def _tearDown_class(cls): - cls.knobs.disable() - for client in cls.mongos_clients: - await client.close() - await super()._tearDown_class() - - def setUp(self): - super().setUp() + self.knobs = client_knobs(heartbeat_frequency=0.1, min_heartbeat_interval=0.1) + self.knobs.enable() self.targets = {} self.listener = None # type: ignore self.pool_listener = None self.server_listener = None self.maxDiff = None + async def asyncTearDown(self) -> None: + self.knobs.disable() + async def _set_fail_point(self, client, command_args): cmd = SON([("configureFailPoint", "failCommand")]) cmd.update(command_args) @@ -700,8 +692,6 @@ async def run_scenario(self, scenario_def, test): self.listener = listener self.pool_listener = pool_listener self.server_listener = server_listener - # Close the client explicitly to avoid having too many threads open. - self.addAsyncCleanup(client.close) # Create session0 and session1. 
sessions = {} diff --git a/test/conftest.py b/test/conftest.py index a3d954c7c3..91fad28d0a 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -20,7 +20,7 @@ def event_loop_policy(): return asyncio.get_event_loop_policy() -@pytest.fixture(scope="session", autouse=True) +@pytest.fixture(scope="package", autouse=True) def test_setup_and_teardown(): setup() yield diff --git a/test/test_bulk.py b/test/test_bulk.py index ea2b803804..6d29ff510a 100644 --- a/test/test_bulk.py +++ b/test/test_bulk.py @@ -42,15 +42,11 @@ class BulkTestBase(IntegrationTest): coll: Collection coll_w0: Collection - @classmethod - def _setup_class(cls): - super()._setup_class() - cls.coll = cls.db.test - cls.coll_w0 = cls.coll.with_options(write_concern=WriteConcern(w=0)) - def setUp(self): super().setUp() + self.coll = self.db.test self.coll.drop() + self.coll_w0 = self.coll.with_options(write_concern=WriteConcern(w=0)) def assertEqualResponse(self, expected, actual): """Compare response from bulk.execute() to expected response.""" @@ -785,12 +781,8 @@ def test_large_inserts_unordered(self): class BulkAuthorizationTestBase(BulkTestBase): - @classmethod @client_context.require_auth @client_context.require_no_api_version - def _setup_class(cls): - super()._setup_class() - def setUp(self): super().setUp() client_context.create_user(self.db.name, "readonly", "pw", ["read"]) @@ -935,21 +927,19 @@ class TestBulkWriteConcern(BulkTestBase): w: Optional[int] secondary: MongoClient - @classmethod - def _setup_class(cls): - super()._setup_class() - cls.w = client_context.w - cls.secondary = None - if cls.w is not None and cls.w > 1: + def setUp(self): + super().setUp() + self.w = client_context.w + self.secondary = None + if self.w is not None and self.w > 1: for member in (client_context.hello)["hosts"]: if member != (client_context.hello)["primary"]: - cls.secondary = cls.unmanaged_single_client(*partition_node(member)) + self.secondary = self.single_client(*partition_node(member)) break - @classmethod - def async_tearDownClass(cls): - if cls.secondary: - cls.secondary.close() + def tearDown(self): + if self.secondary: + self.secondary.close() def cause_wtimeout(self, requests, ordered): if not client_context.test_commands_enabled: diff --git a/test/test_change_stream.py b/test/test_change_stream.py index 3a107122b7..4ed21f55cf 100644 --- a/test/test_change_stream.py +++ b/test/test_change_stream.py @@ -820,18 +820,16 @@ def test_split_large_change(self): class TestClusterChangeStream(TestChangeStreamBase, APITestsMixin): dbs: list - @classmethod @client_context.require_version_min(4, 0, 0, -1) @client_context.require_change_streams - def _setup_class(cls): - super()._setup_class() - cls.dbs = [cls.db, cls.client.pymongo_test_2] + def setUp(self) -> None: + super().setUp() + self.dbs = [self.db, self.client.pymongo_test_2] - @classmethod - def _tearDown_class(cls): - for db in cls.dbs: - cls.client.drop_database(db) - super()._tearDown_class() + def tearDown(self): + for db in self.dbs: + self.client.drop_database(db) + super().tearDown() def change_stream_with_client(self, client, *args, **kwargs): return client.watch(*args, **kwargs) @@ -882,11 +880,10 @@ def test_full_pipeline(self): class TestDatabaseChangeStream(TestChangeStreamBase, APITestsMixin): - @classmethod @client_context.require_version_min(4, 0, 0, -1) @client_context.require_change_streams - def _setup_class(cls): - super()._setup_class() + def setUp(self) -> None: + super().setUp() def change_stream_with_client(self, client, *args, **kwargs): return 
client[self.db.name].watch(*args, **kwargs) @@ -968,12 +965,9 @@ def test_isolation(self): class TestCollectionChangeStream(TestChangeStreamBase, APITestsMixin, ProseSpecTestsMixin): - @classmethod @client_context.require_change_streams - def _setup_class(cls): - super()._setup_class() - def setUp(self): + super().setUp() # Use a new collection for each test. self.watched_collection().drop() self.watched_collection().insert_one({}) @@ -1111,20 +1105,11 @@ class TestAllLegacyScenarios(IntegrationTest): RUN_ON_LOAD_BALANCER = True listener: AllowListEventListener - @classmethod @client_context.require_connection - def _setup_class(cls): - super()._setup_class() - cls.listener = AllowListEventListener("aggregate", "getMore") - cls.client = cls.unmanaged_rs_or_single_client(event_listeners=[cls.listener]) - - @classmethod - def _tearDown_class(cls): - cls.client.close() - super()._tearDown_class() - def setUp(self): super().setUp() + self.listener = AllowListEventListener("aggregate", "getMore") + self.client = self.rs_or_single_client(event_listeners=[self.listener]) self.listener.reset() def setUpCluster(self, scenario_dict): diff --git a/test/test_client.py b/test/test_client.py index 5bbb5bd751..5ec425f312 100644 --- a/test/test_client.py +++ b/test/test_client.py @@ -129,13 +129,8 @@ class ClientUnitTest(UnitTest): client: MongoClient - @classmethod - def _setup_class(cls): - cls.client = cls.unmanaged_rs_or_single_client(connect=False, serverSelectionTimeoutMS=100) - - @classmethod - def _tearDown_class(cls): - cls.client.close() + def setUp(self) -> None: + self.client = self.rs_or_single_client(connect=False, serverSelectionTimeoutMS=100) @pytest.fixture(autouse=True) def inject_fixtures(self, caplog): @@ -1039,14 +1034,21 @@ def test_uri_connect_option(self): self.assertFalse(client._topology._opened) # Ensure kill cursors thread has not been started. - kc_thread = client._kill_cursors_executor._thread - self.assertFalse(kc_thread and kc_thread.is_alive()) - + if _IS_SYNC: + kc_thread = client._kill_cursors_executor._thread + self.assertFalse(kc_thread and kc_thread.is_alive()) + else: + kc_task = client._kill_cursors_executor._task + self.assertFalse(kc_task and not kc_task.done()) # Using the client should open topology and start the thread. 
client.admin.command("ping") self.assertTrue(client._topology._opened) - kc_thread = client._kill_cursors_executor._thread - self.assertTrue(kc_thread and kc_thread.is_alive()) + if _IS_SYNC: + kc_thread = client._kill_cursors_executor._thread + self.assertTrue(kc_thread and kc_thread.is_alive()) + else: + kc_task = client._kill_cursors_executor._task + self.assertTrue(kc_task and not kc_task.done()) def test_close_does_not_open_servers(self): client = self.rs_client(connect=False) @@ -1241,6 +1243,7 @@ def get_x(db): def test_server_selection_timeout(self): client = MongoClient(serverSelectionTimeoutMS=100, connect=False) self.assertAlmostEqual(0.1, client.options.server_selection_timeout) + client.close() client = MongoClient(serverSelectionTimeoutMS=0, connect=False) @@ -1251,16 +1254,20 @@ def test_server_selection_timeout(self): self.assertRaises( ConfigurationError, MongoClient, serverSelectionTimeoutMS=None, connect=False ) + client.close() client = MongoClient("mongodb://localhost/?serverSelectionTimeoutMS=100", connect=False) self.assertAlmostEqual(0.1, client.options.server_selection_timeout) + client.close() client = MongoClient("mongodb://localhost/?serverSelectionTimeoutMS=0", connect=False) self.assertAlmostEqual(0, client.options.server_selection_timeout) + client.close() # Test invalid timeout in URI ignored and set to default. client = MongoClient("mongodb://localhost/?serverSelectionTimeoutMS=-1", connect=False) self.assertAlmostEqual(30, client.options.server_selection_timeout) + client.close() client = MongoClient("mongodb://localhost/?serverSelectionTimeoutMS=", connect=False) self.assertAlmostEqual(30, client.options.server_selection_timeout) diff --git a/test/test_collation.py b/test/test_collation.py index b878df2fb4..06436f0638 100644 --- a/test/test_collation.py +++ b/test/test_collation.py @@ -97,26 +97,19 @@ class TestCollation(IntegrationTest): warn_context: Any collation: Collation - @classmethod @client_context.require_connection - def _setup_class(cls): - super()._setup_class() - cls.listener = OvertCommandListener() - cls.client = cls.unmanaged_rs_or_single_client(event_listeners=[cls.listener]) - cls.db = cls.client.pymongo_test - cls.collation = Collation("en_US") - cls.warn_context = warnings.catch_warnings() - cls.warn_context.__enter__() - warnings.simplefilter("ignore", DeprecationWarning) - - @classmethod - def _tearDown_class(cls): - cls.warn_context.__exit__() - cls.warn_context = None - cls.client.close() - super()._tearDown_class() - - def tearDown(self): + def setUp(self) -> None: + super().setUp() + self.listener = OvertCommandListener() + self.client = self.rs_or_single_client(event_listeners=[self.listener]) + self.db = self.client.pymongo_test + self.collation = Collation("en_US") + self.warn_context = warnings.catch_warnings() + self.warn_context.__enter__() + + def tearDown(self) -> None: + self.warn_context.__exit__() + self.warn_context = None self.listener.reset() super().tearDown() diff --git a/test/test_collection.py b/test/test_collection.py index 84a900d45b..af524bba47 100644 --- a/test/test_collection.py +++ b/test/test_collection.py @@ -87,14 +87,10 @@ class TestCollectionNoConnect(UnitTest): db: Database client: MongoClient - @classmethod - def _setup_class(cls): - cls.client = MongoClient(connect=False) - cls.db = cls.client.pymongo_test - - @classmethod - def _tearDown_class(cls): - cls.client.close() + def setUp(self) -> None: + super().setUp() + self.client = self.simple_client(connect=False) + self.db = 
self.client.pymongo_test def test_collection(self): self.assertRaises(TypeError, Collection, self.db, 5) @@ -164,27 +160,14 @@ def test_iteration(self): class TestCollection(IntegrationTest): w: int - @classmethod - def setUpClass(cls): - super().setUpClass() - cls.w = client_context.w # type: ignore - - @classmethod - def tearDownClass(cls): - if _IS_SYNC: - cls.db.drop_collection("test_large_limit") # type: ignore[unused-coroutine] - else: - asyncio.run(cls.async_tearDownClass()) - - @classmethod - def async_tearDownClass(cls): - cls.db.drop_collection("test_large_limit") - def setUp(self): - self.db.test.drop() + super().setUp() + self.w = client_context.w # type: ignore def tearDown(self): self.db.test.drop() + self.db.drop_collection("test_large_limit") + super().tearDown() @contextlib.contextmanager def write_concern_collection(self): @@ -1010,7 +993,10 @@ def test_replace_bypass_document_validation(self): db.test.insert_one({"y": 1}, bypass_document_validation=True) db_w0.test.replace_one({"y": 1}, {"x": 1}, bypass_document_validation=True) - wait_until(lambda: db_w0.test.find_one({"x": 1}), "find w:0 replaced document") + def predicate(): + return db_w0.test.find_one({"x": 1}) + + wait_until(predicate, "find w:0 replaced document") def test_update_bypass_document_validation(self): db = self.db diff --git a/test/test_connections_survive_primary_stepdown_spec.py b/test/test_connections_survive_primary_stepdown_spec.py index 54cc4e0482..84ef6decd5 100644 --- a/test/test_connections_survive_primary_stepdown_spec.py +++ b/test/test_connections_survive_primary_stepdown_spec.py @@ -19,7 +19,12 @@ sys.path[0:0] = [""] -from test import IntegrationTest, client_context, unittest +from test import ( + IntegrationTest, + client_context, + reset_client_context, + unittest, +) from test.helpers import repl_set_step_down from test.utils import ( CMAPListener, @@ -39,29 +44,19 @@ class TestConnectionsSurvivePrimaryStepDown(IntegrationTest): listener: CMAPListener coll: Collection - @classmethod @client_context.require_replica_set - def _setup_class(cls): - super()._setup_class() - cls.listener = CMAPListener() - cls.client = cls.unmanaged_rs_or_single_client( - event_listeners=[cls.listener], retryWrites=False, heartbeatFrequencyMS=500 + def setUp(self): + self.listener = CMAPListener() + self.client = self.rs_or_single_client( + event_listeners=[self.listener], retryWrites=False, heartbeatFrequencyMS=500 ) # Ensure connections to all servers in replica set. This is to test # that the is_writable flag is properly updated for connections that # survive a replica set election. - ensure_all_connected(cls.client) - cls.listener.reset() - - cls.db = cls.client.get_database("step-down", write_concern=WriteConcern("majority")) - cls.coll = cls.db.get_collection("step-down", write_concern=WriteConcern("majority")) - - @classmethod - def _tearDown_class(cls): - cls.client.close() - - def setUp(self): + ensure_all_connected(self.client) + self.db = self.client.get_database("step-down", write_concern=WriteConcern("majority")) + self.coll = self.db.get_collection("step-down", write_concern=WriteConcern("majority")) # Note that all ops use same write-concern as self.db (majority). 
self.db.drop_collection("step-down") self.db.create_collection("step-down") diff --git a/test/test_create_entities.py b/test/test_create_entities.py index ad75fe5702..9d77a08eee 100644 --- a/test/test_create_entities.py +++ b/test/test_create_entities.py @@ -56,6 +56,9 @@ def test_store_events_as_entities(self): self.assertGreater(len(final_entity_map["events1"]), 0) for event in final_entity_map["events1"]: self.assertIn("PoolCreatedEvent", event["name"]) + if self.scenario_runner.mongos_clients: + for client in self.scenario_runner.mongos_clients: + client.close() def test_store_all_others_as_entities(self): self.scenario_runner = UnifiedSpecTestMixinV1() @@ -122,6 +125,9 @@ def test_store_all_others_as_entities(self): self.assertEqual(entity_map["failures"], []) self.assertEqual(entity_map["successes"], 2) self.assertEqual(entity_map["iterations"], 5) + if self.scenario_runner.mongos_clients: + for client in self.scenario_runner.mongos_clients: + client.close() if __name__ == "__main__": diff --git a/test/test_cursor.py b/test/test_cursor.py index 9eac0f1c49..bcc7ed75f1 100644 --- a/test/test_cursor.py +++ b/test/test_cursor.py @@ -1636,10 +1636,6 @@ def test_monitoring(self): class TestRawBatchCommandCursor(IntegrationTest): - @classmethod - def _setup_class(cls): - super()._setup_class() - def test_aggregate_raw(self): c = self.db.test c.drop() diff --git a/test/test_custom_types.py b/test/test_custom_types.py index abaa820cb7..6771ea25f9 100644 --- a/test/test_custom_types.py +++ b/test/test_custom_types.py @@ -633,6 +633,7 @@ class MyType(pytype): # type: ignore class TestCollectionWCustomType(IntegrationTest): def setUp(self): + super().setUp() self.db.test.drop() def tearDown(self): @@ -754,6 +755,7 @@ def test_find_one_and__w_custom_type_decoder(self): class TestGridFileCustomType(IntegrationTest): def setUp(self): + super().setUp() self.db.drop_collection("fs.files") self.db.drop_collection("fs.chunks") @@ -917,11 +919,10 @@ def run_test(doc_cls): class TestCollectionChangeStreamsWCustomTypes(IntegrationTest, ChangeStreamsWCustomTypesTestMixin): - @classmethod @client_context.require_change_streams - def setUpClass(cls): - super().setUpClass() - cls.db.test.delete_many({}) + def setUp(self): + super().setUp() + self.db.test.delete_many({}) def tearDown(self): self.input_target.drop() @@ -935,12 +936,11 @@ def create_targets(self, *args, **kwargs): class TestDatabaseChangeStreamsWCustomTypes(IntegrationTest, ChangeStreamsWCustomTypesTestMixin): - @classmethod @client_context.require_version_min(4, 0, 0) @client_context.require_change_streams - def setUpClass(cls): - super().setUpClass() - cls.db.test.delete_many({}) + def setUp(self): + super().setUp() + self.db.test.delete_many({}) def tearDown(self): self.input_target.drop() @@ -954,12 +954,11 @@ def create_targets(self, *args, **kwargs): class TestClusterChangeStreamsWCustomTypes(IntegrationTest, ChangeStreamsWCustomTypesTestMixin): - @classmethod @client_context.require_version_min(4, 0, 0) @client_context.require_change_streams - def setUpClass(cls): - super().setUpClass() - cls.db.test.delete_many({}) + def setUp(self): + super().setUp() + self.db.test.delete_many({}) def tearDown(self): self.input_target.drop() diff --git a/test/test_database.py b/test/test_database.py index 4973ed0134..5e854c941d 100644 --- a/test/test_database.py +++ b/test/test_database.py @@ -709,6 +709,7 @@ def test_with_options(self): class TestDatabaseAggregation(IntegrationTest): def setUp(self): + super().setUp() self.pipeline: List[Mapping[str, 
Any]] = [ {"$listLocalSessions": {}}, {"$limit": 1}, diff --git a/test/test_encryption.py b/test/test_encryption.py index 0806f91a06..cb8bcb74d6 100644 --- a/test/test_encryption.py +++ b/test/test_encryption.py @@ -211,11 +211,10 @@ def test_kwargs(self): class EncryptionIntegrationTest(IntegrationTest): """Base class for encryption integration tests.""" - @classmethod @unittest.skipUnless(_HAVE_PYMONGOCRYPT, "pymongocrypt is not installed") @client_context.require_version_min(4, 2, -1) - def _setup_class(cls): - super()._setup_class() + def setUp(self) -> None: + super().setUp() def assertEncrypted(self, val): self.assertIsInstance(val, Binary) @@ -430,10 +429,9 @@ def test_upsert_uuid_standard_encrypt(self): class TestClientMaxWireVersion(IntegrationTest): - @classmethod @unittest.skipUnless(_HAVE_PYMONGOCRYPT, "pymongocrypt is not installed") - def _setup_class(cls): - super()._setup_class() + def setUp(self): + super().setUp() @client_context.require_version_max(4, 0, 99) def test_raise_max_wire_version_error(self): @@ -816,17 +814,16 @@ class TestDataKeyDoubleEncryption(EncryptionIntegrationTest): "local": None, } - @classmethod @unittest.skipUnless( any([all(AWS_CREDS.values()), all(AZURE_CREDS.values()), all(GCP_CREDS.values())]), "No environment credentials are set", ) - def _setup_class(cls): - super()._setup_class() - cls.listener = OvertCommandListener() - cls.client = cls.unmanaged_rs_or_single_client(event_listeners=[cls.listener]) - cls.client.db.coll.drop() - cls.vault = create_key_vault(cls.client.keyvault.datakeys) + def setUp(self): + super().setUp() + self.listener = OvertCommandListener() + self.client = self.rs_or_single_client(event_listeners=[self.listener]) + self.client.db.coll.drop() + self.vault = create_key_vault(self.client.keyvault.datakeys) # Configure the encrypted field via the local schema_map option. schemas = { @@ -844,25 +841,22 @@ def _setup_class(cls): } } opts = AutoEncryptionOpts( - cls.KMS_PROVIDERS, "keyvault.datakeys", schema_map=schemas, kms_tls_options=KMS_TLS_OPTS + self.KMS_PROVIDERS, + "keyvault.datakeys", + schema_map=schemas, + kms_tls_options=KMS_TLS_OPTS, ) - cls.client_encrypted = cls.unmanaged_rs_or_single_client( + self.client_encrypted = self.rs_or_single_client( auto_encryption_opts=opts, uuidRepresentation="standard" ) - cls.client_encryption = cls.unmanaged_create_client_encryption( - cls.KMS_PROVIDERS, "keyvault.datakeys", cls.client, OPTS, kms_tls_options=KMS_TLS_OPTS + self.client_encryption = self.create_client_encryption( + self.KMS_PROVIDERS, "keyvault.datakeys", self.client, OPTS, kms_tls_options=KMS_TLS_OPTS ) - - @classmethod - def _tearDown_class(cls): - cls.vault.drop() - cls.client.close() - cls.client_encrypted.close() - cls.client_encryption.close() - - def setUp(self): self.listener.reset() + def tearDown(self) -> None: + self.vault.drop() + def run_test(self, provider_name): # Create data key. 
master_key: Any = self.MASTER_KEYS[provider_name] @@ -1007,10 +1001,9 @@ def test_views_are_prohibited(self): class TestCorpus(EncryptionIntegrationTest): - @classmethod @unittest.skipUnless(any(AWS_CREDS.values()), "AWS environment credentials are not set") - def _setup_class(cls): - super()._setup_class() + def setUp(self): + super().setUp() @staticmethod def kms_providers(): @@ -1184,12 +1177,11 @@ class TestBsonSizeBatches(EncryptionIntegrationTest): client_encrypted: MongoClient listener: OvertCommandListener - @classmethod - def _setup_class(cls): - super()._setup_class() + def setUp(self): + super().setUp() db = client_context.client.db - cls.coll = db.coll - cls.coll.drop() + self.coll = db.coll + self.coll.drop() # Configure the encrypted 'db.coll' collection via jsonSchema. json_schema = json_data("limits", "limits-schema.json") db.create_collection( @@ -1207,17 +1199,14 @@ def _setup_class(cls): coll.insert_one(json_data("limits", "limits-key.json")) opts = AutoEncryptionOpts({"local": {"key": LOCAL_MASTER_KEY}}, "keyvault.datakeys") - cls.listener = OvertCommandListener() - cls.client_encrypted = cls.unmanaged_rs_or_single_client( - auto_encryption_opts=opts, event_listeners=[cls.listener] + self.listener = OvertCommandListener() + self.client_encrypted = self.rs_or_single_client( + auto_encryption_opts=opts, event_listeners=[self.listener] ) - cls.coll_encrypted = cls.client_encrypted.db.coll + self.coll_encrypted = self.client_encrypted.db.coll - @classmethod - def _tearDown_class(cls): - cls.coll_encrypted.drop() - cls.client_encrypted.close() - super()._tearDown_class() + def tearDown(self) -> None: + self.coll_encrypted.drop() def test_01_insert_succeeds_under_2MiB(self): doc = {"_id": "over_2mib_under_16mib", "unencrypted": "a" * _2_MiB} @@ -1241,7 +1230,9 @@ def test_03_bulk_batch_split(self): doc2 = {"_id": "over_2mib_2", "unencrypted": "a" * _2_MiB} self.listener.reset() self.coll_encrypted.bulk_write([InsertOne(doc1), InsertOne(doc2)]) - self.assertEqual(self.listener.started_command_names(), ["insert", "insert"]) + self.assertEqual( + len([c for c in self.listener.started_command_names() if c == "insert"]), 2 + ) def test_04_bulk_batch_split(self): limits_doc = json_data("limits", "limits-doc.json") @@ -1251,7 +1242,9 @@ def test_04_bulk_batch_split(self): doc2.update(limits_doc) self.listener.reset() self.coll_encrypted.bulk_write([InsertOne(doc1), InsertOne(doc2)]) - self.assertEqual(self.listener.started_command_names(), ["insert", "insert"]) + self.assertEqual( + len([c for c in self.listener.started_command_names() if c == "insert"]), 2 + ) def test_05_insert_succeeds_just_under_16MiB(self): doc = {"_id": "under_16mib", "unencrypted": "a" * (_16_MiB - 2000)} @@ -1281,15 +1274,12 @@ def test_06_insert_fails_over_16MiB(self): class TestCustomEndpoint(EncryptionIntegrationTest): """Prose tests for creating data keys with a custom endpoint.""" - @classmethod @unittest.skipUnless( any([all(AWS_CREDS.values()), all(AZURE_CREDS.values()), all(GCP_CREDS.values())]), "No environment credentials are set", ) - def _setup_class(cls): - super()._setup_class() - def setUp(self): + super().setUp() kms_providers = { "aws": AWS_CREDS, "azure": AZURE_CREDS, @@ -1318,10 +1308,6 @@ def setUp(self): self._kmip_host_error = None self._invalid_host_error = None - def tearDown(self): - self.client_encryption.close() - self.client_encryption_invalid.close() - def run_test_expected_success(self, provider_name, master_key): data_key_id = 
self.client_encryption.create_data_key(provider_name, master_key=master_key) encrypted = self.client_encryption.encrypt( @@ -1494,18 +1480,18 @@ class AzureGCPEncryptionTestMixin(EncryptionIntegrationTest): KEYVAULT_COLL = "datakeys" client: MongoClient - def setUp(self): + def _setup(self): keyvault = self.client.get_database(self.KEYVAULT_DB).get_collection(self.KEYVAULT_COLL) create_key_vault(keyvault, self.DEK) def _test_explicit(self, expectation): + self._setup() client_encryption = self.create_client_encryption( self.KMS_PROVIDER_MAP, # type: ignore[arg-type] ".".join([self.KEYVAULT_DB, self.KEYVAULT_COLL]), client_context.client, OPTS, ) - self.addCleanup(client_encryption.close) ciphertext = client_encryption.encrypt( "string0", @@ -1517,6 +1503,7 @@ def _test_explicit(self, expectation): self.assertEqual(client_encryption.decrypt(ciphertext), "string0") def _test_automatic(self, expectation_extjson, payload): + self._setup() encrypted_db = "db" encrypted_coll = "coll" keyvault_namespace = ".".join([self.KEYVAULT_DB, self.KEYVAULT_COLL]) @@ -1531,7 +1518,6 @@ def _test_automatic(self, expectation_extjson, payload): client = self.rs_or_single_client( auto_encryption_opts=encryption_opts, event_listeners=[insert_listener] ) - self.addCleanup(client.close) coll = client.get_database(encrypted_db).get_collection( encrypted_coll, codec_options=OPTS, write_concern=WriteConcern("majority") @@ -1553,13 +1539,12 @@ def _test_automatic(self, expectation_extjson, payload): class TestAzureEncryption(AzureGCPEncryptionTestMixin, EncryptionIntegrationTest): - @classmethod @unittest.skipUnless(any(AZURE_CREDS.values()), "Azure environment credentials are not set") - def _setup_class(cls): - cls.KMS_PROVIDER_MAP = {"azure": AZURE_CREDS} - cls.DEK = json_data(BASE, "custom", "azure-dek.json") - cls.SCHEMA_MAP = json_data(BASE, "custom", "azure-gcp-schema.json") - super()._setup_class() + def setUp(self): + self.KMS_PROVIDER_MAP = {"azure": AZURE_CREDS} + self.DEK = json_data(BASE, "custom", "azure-dek.json") + self.SCHEMA_MAP = json_data(BASE, "custom", "azure-gcp-schema.json") + super().setUp() def test_explicit(self): return self._test_explicit( @@ -1579,13 +1564,12 @@ def test_automatic(self): class TestGCPEncryption(AzureGCPEncryptionTestMixin, EncryptionIntegrationTest): - @classmethod @unittest.skipUnless(any(GCP_CREDS.values()), "GCP environment credentials are not set") - def _setup_class(cls): - cls.KMS_PROVIDER_MAP = {"gcp": GCP_CREDS} - cls.DEK = json_data(BASE, "custom", "gcp-dek.json") - cls.SCHEMA_MAP = json_data(BASE, "custom", "azure-gcp-schema.json") - super()._setup_class() + def setUp(self): + self.KMS_PROVIDER_MAP = {"gcp": GCP_CREDS} + self.DEK = json_data(BASE, "custom", "gcp-dek.json") + self.SCHEMA_MAP = json_data(BASE, "custom", "azure-gcp-schema.json") + super().setUp() def test_explicit(self): return self._test_explicit( @@ -1607,6 +1591,7 @@ def test_automatic(self): # https://github.com/mongodb/specifications/blob/master/source/client-side-encryption/tests/README.md#deadlock-tests class TestDeadlockProse(EncryptionIntegrationTest): def setUp(self): + super().setUp() self.client_test = self.rs_or_single_client( maxPoolSize=1, readConcernLevel="majority", w="majority", uuidRepresentation="standard" ) @@ -1637,7 +1622,6 @@ def setUp(self): self.ciphertext = client_encryption.encrypt( "string0", Algorithm.AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic, key_alt_name="local" ) - client_encryption.close() self.client_listener = OvertCommandListener() self.topology_listener = 
TopologyEventListener() @@ -1832,6 +1816,7 @@ def test_case_8(self): # https://github.com/mongodb/specifications/blob/master/source/client-side-encryption/tests/README.md#14-decryption-events class TestDecryptProse(EncryptionIntegrationTest): def setUp(self): + super().setUp() self.client = client_context.client self.client.db.drop_collection("decryption_events") create_key_vault(self.client.keyvault.datakeys) @@ -2267,6 +2252,7 @@ def test_06_named_kms_providers_apply_tls_options_kmip(self): # https://github.com/mongodb/specifications/blob/50e26fe/source/client-side-encryption/tests/README.md#unique-index-on-keyaltnames class TestUniqueIndexOnKeyAltNamesProse(EncryptionIntegrationTest): def setUp(self): + super().setUp() self.client = client_context.client create_key_vault(self.client.keyvault.datakeys) kms_providers_map = {"local": {"key": LOCAL_MASTER_KEY}} @@ -2608,8 +2594,6 @@ def MongoClient(**kwargs): assert isinstance(res["encrypted_indexed"], Binary) assert isinstance(res["encrypted_unindexed"], Binary) - client_encryption.close() - # https://github.com/mongodb/specifications/blob/master/source/client-side-encryption/tests/README.md#22-range-explicit-encryption class TestRangeQueryProse(EncryptionIntegrationTest): @@ -3071,17 +3055,11 @@ class TestNoSessionsSupport(EncryptionIntegrationTest): mongocryptd_client: MongoClient MONGOCRYPTD_PORT = 27020 - @classmethod @unittest.skipIf(os.environ.get("TEST_CRYPT_SHARED"), "crypt_shared lib is installed") - def _setup_class(cls): - super()._setup_class() - start_mongocryptd(cls.MONGOCRYPTD_PORT) - - @classmethod - def _tearDown_class(cls): - super()._tearDown_class() - def setUp(self) -> None: + super().setUp() + start_mongocryptd(self.MONGOCRYPTD_PORT) + self.listener = OvertCommandListener() self.mongocryptd_client = self.simple_client( f"mongodb://localhost:{self.MONGOCRYPTD_PORT}", event_listeners=[self.listener] diff --git a/test/test_examples.py b/test/test_examples.py index ebf1d784a3..7f98226e7a 100644 --- a/test/test_examples.py +++ b/test/test_examples.py @@ -33,19 +33,14 @@ class TestSampleShellCommands(IntegrationTest): - @classmethod - def setUpClass(cls): - super().setUpClass() - # Run once before any tests run. - cls.db.inventory.drop() - - @classmethod - def tearDownClass(cls): - cls.client.drop_database("pymongo_test") + def setUp(self): + super().setUp() + self.db.inventory.drop() def tearDown(self): # Run after every test. 
self.db.inventory.drop() + self.client.drop_database("pymongo_test") def test_first_three_examples(self): db = self.db diff --git a/test/test_grid_file.py b/test/test_grid_file.py index c35efccef5..6534bc11bf 100644 --- a/test/test_grid_file.py +++ b/test/test_grid_file.py @@ -97,6 +97,7 @@ def test_grid_in_custom_opts(self): class TestGridFile(IntegrationTest): def setUp(self): + super().setUp() self.cleanup_colls(self.db.fs.files, self.db.fs.chunks) def test_basic(self): diff --git a/test/test_gridfs.py b/test/test_gridfs.py index 549dc0b204..a36109f399 100644 --- a/test/test_gridfs.py +++ b/test/test_gridfs.py @@ -75,9 +75,9 @@ def run(self): class TestGridfsNoConnect(unittest.TestCase): db: Database - @classmethod - def setUpClass(cls): - cls.db = MongoClient(connect=False).pymongo_test + def setUp(self): + super().setUp() + self.db = MongoClient(connect=False).pymongo_test def test_gridfs(self): self.assertRaises(TypeError, gridfs.GridFS, "foo") @@ -88,13 +88,10 @@ class TestGridfs(IntegrationTest): fs: gridfs.GridFS alt: gridfs.GridFS - @classmethod - def setUpClass(cls): - super().setUpClass() - cls.fs = gridfs.GridFS(cls.db) - cls.alt = gridfs.GridFS(cls.db, "alt") - def setUp(self): + super().setUp() + self.fs = gridfs.GridFS(self.db) + self.alt = gridfs.GridFS(self.db, "alt") self.cleanup_colls( self.db.fs.files, self.db.fs.chunks, self.db.alt.files, self.db.alt.chunks ) @@ -509,10 +506,9 @@ def test_md5(self): class TestGridfsReplicaSet(IntegrationTest): - @classmethod @client_context.require_secondaries_count(1) - def setUpClass(cls): - super().setUpClass() + def setUp(self): + super().setUp() @classmethod def tearDownClass(cls): diff --git a/test/test_gridfs_bucket.py b/test/test_gridfs_bucket.py index 28adb7051a..04c7427350 100644 --- a/test/test_gridfs_bucket.py +++ b/test/test_gridfs_bucket.py @@ -79,13 +79,10 @@ class TestGridfs(IntegrationTest): fs: gridfs.GridFSBucket alt: gridfs.GridFSBucket - @classmethod - def setUpClass(cls): - super().setUpClass() - cls.fs = gridfs.GridFSBucket(cls.db) - cls.alt = gridfs.GridFSBucket(cls.db, bucket_name="alt") - def setUp(self): + super().setUp() + self.fs = gridfs.GridFSBucket(self.db) + self.alt = gridfs.GridFSBucket(self.db, bucket_name="alt") self.cleanup_colls( self.db.fs.files, self.db.fs.chunks, self.db.alt.files, self.db.alt.chunks ) @@ -479,10 +476,9 @@ def test_md5(self): class TestGridfsBucketReplicaSet(IntegrationTest): - @classmethod @client_context.require_secondaries_count(1) - def setUpClass(cls): - super().setUpClass() + def setUp(self): + super().setUp() @classmethod def tearDownClass(cls): diff --git a/test/test_monitor.py b/test/test_monitor.py index f8e9443fae..a704f3d8cb 100644 --- a/test/test_monitor.py +++ b/test/test_monitor.py @@ -29,7 +29,7 @@ wait_until, ) -from pymongo.synchronous.periodic_executor import _EXECUTORS +from pymongo.periodic_executor import _EXECUTORS def unregistered(ref): diff --git a/test/test_monitoring.py b/test/test_monitoring.py index 75fe5c987a..670558c0a0 100644 --- a/test/test_monitoring.py +++ b/test/test_monitoring.py @@ -52,22 +52,14 @@ class TestCommandMonitoring(IntegrationTest): listener: EventListener @classmethod - @client_context.require_connection - def _setup_class(cls): - super()._setup_class() + def setUpClass(cls) -> None: cls.listener = OvertCommandListener() - cls.client = cls.unmanaged_rs_or_single_client( - event_listeners=[cls.listener], retryWrites=False - ) - @classmethod - def _tearDown_class(cls): - cls.client.close() - super()._tearDown_class() - - def 
tearDown(self): + @client_context.require_connection + def setUp(self) -> None: + super().setUp() self.listener.reset() - super().tearDown() + self.client = self.rs_or_single_client(event_listeners=[self.listener], retryWrites=False) def test_started_simple(self): self.client.pymongo_test.command("ping") @@ -1140,26 +1132,23 @@ class TestGlobalListener(IntegrationTest): saved_listeners: Any @classmethod - @client_context.require_connection - def _setup_class(cls): - super()._setup_class() + def setUpClass(cls) -> None: cls.listener = OvertCommandListener() # We plan to call register(), which internally modifies _LISTENERS. cls.saved_listeners = copy.deepcopy(monitoring._LISTENERS) monitoring.register(cls.listener) - cls.client = cls.unmanaged_single_client() - # Get one (authenticated) socket in the pool. - cls.client.pymongo_test.command("ping") - - @classmethod - def _tearDown_class(cls): - monitoring._LISTENERS = cls.saved_listeners - cls.client.close() - super()._tearDown_class() + @client_context.require_connection def setUp(self): super().setUp() self.listener.reset() + self.client = self.single_client() + # Get one (authenticated) socket in the pool. + self.client.pymongo_test.command("ping") + + @classmethod + def tearDownClass(cls): + monitoring._LISTENERS = cls.saved_listeners def test_simple(self): self.client.pymongo_test.command("ping") diff --git a/test/test_read_concern.py b/test/test_read_concern.py index ea9ce49a30..f7c0901422 100644 --- a/test/test_read_concern.py +++ b/test/test_read_concern.py @@ -31,24 +31,16 @@ class TestReadConcern(IntegrationTest): listener: OvertCommandListener - @classmethod @client_context.require_connection - def setUpClass(cls): - super().setUpClass() - cls.listener = OvertCommandListener() - cls.client = cls.unmanaged_rs_or_single_client(event_listeners=[cls.listener]) - cls.db = cls.client.pymongo_test + def setUp(self): + super().setUp() + self.listener = OvertCommandListener() + self.client = self.rs_or_single_client(event_listeners=[self.listener]) + self.db = self.client.pymongo_test client_context.client.pymongo_test.create_collection("coll") - @classmethod - def tearDownClass(cls): - cls.client.close() - client_context.client.pymongo_test.drop_collection("coll") - super().tearDownClass() - def tearDown(self): - self.listener.reset() - super().tearDown() + client_context.client.pymongo_test.drop_collection("coll") def test_read_concern(self): rc = ReadConcern() diff --git a/test/test_retryable_writes.py b/test/test_retryable_writes.py index 74f3c23e51..07bd1db0ba 100644 --- a/test/test_retryable_writes.py +++ b/test/test_retryable_writes.py @@ -132,34 +132,27 @@ class IgnoreDeprecationsTest(IntegrationTest): RUN_ON_SERVERLESS = True deprecation_filter: DeprecationFilter - @classmethod - def _setup_class(cls): - super()._setup_class() - cls.deprecation_filter = DeprecationFilter() + def setUp(self) -> None: + super().setUp() + self.deprecation_filter = DeprecationFilter() - @classmethod - def _tearDown_class(cls): - cls.deprecation_filter.stop() - super()._tearDown_class() + def tearDown(self) -> None: + self.deprecation_filter.stop() class TestRetryableWritesMMAPv1(IgnoreDeprecationsTest): knobs: client_knobs - @classmethod - def _setup_class(cls): - super()._setup_class() + def setUp(self) -> None: + super().setUp() # Speed up the tests by decreasing the heartbeat frequency. 
- cls.knobs = client_knobs(heartbeat_frequency=0.1, min_heartbeat_interval=0.1) - cls.knobs.enable() - cls.client = cls.unmanaged_rs_or_single_client(retryWrites=True) - cls.db = cls.client.pymongo_test + self.knobs = client_knobs(heartbeat_frequency=0.1, min_heartbeat_interval=0.1) + self.knobs.enable() + self.client = self.rs_or_single_client(retryWrites=True) + self.db = self.client.pymongo_test - @classmethod - def _tearDown_class(cls): - cls.knobs.disable() - cls.client.close() - super()._tearDown_class() + def tearDown(self) -> None: + self.knobs.disable() @client_context.require_no_standalone def test_actionable_error_message(self): @@ -180,26 +173,16 @@ class TestRetryableWrites(IgnoreDeprecationsTest): listener: OvertCommandListener knobs: client_knobs - @classmethod @client_context.require_no_mmap - def _setup_class(cls): - super()._setup_class() + def setUp(self) -> None: + super().setUp() # Speed up the tests by decreasing the heartbeat frequency. - cls.knobs = client_knobs(heartbeat_frequency=0.1, min_heartbeat_interval=0.1) - cls.knobs.enable() - cls.listener = OvertCommandListener() - cls.client = cls.unmanaged_rs_or_single_client( - retryWrites=True, event_listeners=[cls.listener] - ) - cls.db = cls.client.pymongo_test - - @classmethod - def _tearDown_class(cls): - cls.knobs.disable() - cls.client.close() - super()._tearDown_class() + self.knobs = client_knobs(heartbeat_frequency=0.1, min_heartbeat_interval=0.1) + self.knobs.enable() + self.listener = OvertCommandListener() + self.client = self.rs_or_single_client(retryWrites=True, event_listeners=[self.listener]) + self.db = self.client.pymongo_test - def setUp(self): if client_context.is_rs and client_context.test_commands_enabled: self.client.admin.command( SON([("configureFailPoint", "onPrimaryTransactionalWrite"), ("mode", "alwaysOn")]) @@ -210,6 +193,7 @@ def tearDown(self): self.client.admin.command( SON([("configureFailPoint", "onPrimaryTransactionalWrite"), ("mode", "off")]) ) + self.knobs.disable() def test_supported_single_statement_no_retry(self): listener = OvertCommandListener() @@ -438,13 +422,12 @@ class TestWriteConcernError(IntegrationTest): RUN_ON_SERVERLESS = True fail_insert: dict - @classmethod @client_context.require_replica_set @client_context.require_no_mmap @client_context.require_failCommand_fail_point - def _setup_class(cls): - super()._setup_class() - cls.fail_insert = { + def setUp(self) -> None: + super().setUp() + self.fail_insert = { "configureFailPoint": "failCommand", "mode": {"times": 2}, "data": { diff --git a/test/test_sdam_monitoring_spec.py b/test/test_sdam_monitoring_spec.py index 81b208d511..6b808b159d 100644 --- a/test/test_sdam_monitoring_spec.py +++ b/test/test_sdam_monitoring_spec.py @@ -270,7 +270,7 @@ class TestSdamMonitoring(IntegrationTest): @classmethod @client_context.require_failCommand_fail_point def setUpClass(cls): - super().setUpClass() + super().setUp(cls) # Speed up the tests by decreasing the event publish frequency. 
cls.knobs = client_knobs( events_queue_frequency=0.1, heartbeat_frequency=0.1, min_heartbeat_interval=0.1 diff --git a/test/test_session.py b/test/test_session.py index d0bbb075a8..634efa11c0 100644 --- a/test/test_session.py +++ b/test/test_session.py @@ -82,36 +82,27 @@ class TestSession(IntegrationTest): client2: MongoClient sensitive_commands: Set[str] - @classmethod @client_context.require_sessions - def _setup_class(cls): - super()._setup_class() + def setUp(self): + super().setUp() # Create a second client so we can make sure clients cannot share # sessions. - cls.client2 = cls.unmanaged_rs_or_single_client() + self.client2 = self.rs_or_single_client() # Redact no commands, so we can test user-admin commands have "lsid". - cls.sensitive_commands = monitoring._SENSITIVE_COMMANDS.copy() + self.sensitive_commands = monitoring._SENSITIVE_COMMANDS.copy() monitoring._SENSITIVE_COMMANDS.clear() - @classmethod - def _tearDown_class(cls): - monitoring._SENSITIVE_COMMANDS.update(cls.sensitive_commands) - cls.client2.close() - super()._tearDown_class() - - def setUp(self): self.listener = SessionTestListener() self.session_checker_listener = SessionTestListener() self.client = self.rs_or_single_client( event_listeners=[self.listener, self.session_checker_listener] ) - self.addCleanup(self.client.close) self.db = self.client.pymongo_test self.initial_lsids = {s["id"] for s in session_ids(self.client)} def tearDown(self): - """All sessions used in the test must be returned to the pool.""" + monitoring._SENSITIVE_COMMANDS.update(self.sensitive_commands) self.client.drop_database("pymongo_test") used_lsids = self.initial_lsids.copy() for event in self.session_checker_listener.started_events: @@ -121,6 +112,8 @@ def tearDown(self): current_lsids = {s["id"] for s in session_ids(self.client)} self.assertLessEqual(used_lsids, current_lsids) + super().tearDown() + def _test_ops(self, client, *ops): listener = client.options.event_listeners[0] @@ -832,18 +825,11 @@ class TestCausalConsistency(UnitTest): listener: SessionTestListener client: MongoClient - @classmethod - def _setup_class(cls): - cls.listener = SessionTestListener() - cls.client = cls.unmanaged_rs_or_single_client(event_listeners=[cls.listener]) - - @classmethod - def _tearDown_class(cls): - cls.client.close() - @client_context.require_sessions def setUp(self): super().setUp() + self.listener = SessionTestListener() + self.client = self.rs_or_single_client(event_listeners=[self.listener]) @client_context.require_no_standalone def test_core(self): diff --git a/test/test_threads.py b/test/test_threads.py index b3dadbb1a3..3e469e28fe 100644 --- a/test/test_threads.py +++ b/test/test_threads.py @@ -105,6 +105,7 @@ def run(self): class TestThreads(IntegrationTest): def setUp(self): + super().setUp() self.db = self.client.pymongo_test def test_threading(self): diff --git a/test/test_transactions.py b/test/test_transactions.py index 3cecbe9d38..949b88e60b 100644 --- a/test/test_transactions.py +++ b/test/test_transactions.py @@ -395,19 +395,12 @@ def __exit__(self, exc_type, exc_val, exc_tb): class TestTransactionsConvenientAPI(TransactionsBase): - @classmethod - def _setup_class(cls): - super()._setup_class() - cls.mongos_clients = [] + def setUp(self) -> None: + super().setUp() + self.mongos_clients = [] if client_context.supports_transactions(): for address in client_context.mongoses: - cls.mongos_clients.append(cls.unmanaged_single_client("{}:{}".format(*address))) - - @classmethod - def _tearDown_class(cls): - for client in 
cls.mongos_clients: - client.close() - super()._tearDown_class() + self.mongos_clients.append(self.single_client("{}:{}".format(*address))) def _set_fail_point(self, client, command_args): cmd = {"configureFailPoint": "failCommand"} diff --git a/test/test_typing.py b/test/test_typing.py index 441707616e..bfe4d032c1 100644 --- a/test/test_typing.py +++ b/test/test_typing.py @@ -114,10 +114,9 @@ def test_mypy_failures(self) -> None: class TestPymongo(IntegrationTest): coll: Collection - @classmethod - def setUpClass(cls): - super().setUpClass() - cls.coll = cls.client.test.test + def setUp(self): + super().setUp() + self.coll = self.client.test.test def test_insert_find(self) -> None: doc = {"my": "doc"} diff --git a/test/unified_format.py b/test/unified_format.py index 3489a8ac84..5cb268a29d 100644 --- a/test/unified_format.py +++ b/test/unified_format.py @@ -304,7 +304,6 @@ def _create_entity(self, entity_spec, uri=None): kwargs["h"] = uri client = self.test.rs_or_single_client(**kwargs) self[spec["id"]] = client - self.test.addCleanup(client.close) return elif entity_type == "database": client = self[spec["client"]] @@ -479,52 +478,47 @@ def insert_initial_data(self, initial_data): db.create_collection(coll_name, write_concern=wc, **opts) @classmethod - def _setup_class(cls): + def setUpClass(cls) -> None: + # Speed up the tests by decreasing the heartbeat frequency. + cls.knobs = client_knobs( + heartbeat_frequency=0.1, + min_heartbeat_interval=0.1, + kill_cursor_frequency=0.1, + events_queue_frequency=0.1, + ) + cls.knobs.enable() + + @classmethod + def tearDownClass(cls) -> None: + cls.knobs.disable() + + def setUp(self): # super call creates internal client cls.client - super()._setup_class() + super().setUp() # process file-level runOnRequirements - run_on_spec = cls.TEST_SPEC.get("runOnRequirements", []) - if not cls.should_run_on(run_on_spec): - raise unittest.SkipTest(f"{cls.__name__} runOnRequirements not satisfied") + run_on_spec = self.TEST_SPEC.get("runOnRequirements", []) + if not self.should_run_on(run_on_spec): + raise unittest.SkipTest(f"{self.__class__.__name__} runOnRequirements not satisfied") # add any special-casing for skipping tests here if client_context.storage_engine == "mmapv1": - if "retryable-writes" in cls.TEST_SPEC["description"] or "retryable_writes" in str( - cls.TEST_PATH + if "retryable-writes" in self.TEST_SPEC["description"] or "retryable_writes" in str( + self.TEST_PATH ): raise unittest.SkipTest("MMAPv1 does not support retryWrites=True") # Handle mongos_clients for transactions tests. - cls.mongos_clients = [] + self.mongos_clients = [] if ( client_context.supports_transactions() and not client_context.load_balancer and not client_context.serverless ): for address in client_context.mongoses: - cls.mongos_clients.append(cls.unmanaged_single_client("{}:{}".format(*address))) + self.mongos_clients.append(self.single_client("{}:{}".format(*address))) - # Speed up the tests by decreasing the heartbeat frequency. 
- cls.knobs = client_knobs( - heartbeat_frequency=0.1, - min_heartbeat_interval=0.1, - kill_cursor_frequency=0.1, - events_queue_frequency=0.1, - ) - cls.knobs.enable() - - @classmethod - def _tearDown_class(cls): - cls.knobs.disable() - for client in cls.mongos_clients: - client.close() - super()._tearDown_class() - - def setUp(self): - super().setUp() # process schemaVersion # note: we check major schema version during class generation - # note: we do this here because we cannot run assertions in setUpClass version = Version.from_string(self.TEST_SPEC["schemaVersion"]) self.assertLessEqual( version, @@ -1026,7 +1020,6 @@ def _testOperation_targetedFailPoint(self, spec): ) client = self.single_client("{}:{}".format(*session._pinned_address)) - self.addCleanup(client.close) self.__set_fail_point(client=client, command_args=spec["failPoint"]) def _testOperation_createEntities(self, spec): diff --git a/test/utils.py b/test/utils.py index 9b326e5d73..69154bc63b 100644 --- a/test/utils.py +++ b/test/utils.py @@ -99,6 +99,12 @@ def wait_for_event(self, event, count): """Wait for a number of events to be published, or fail.""" wait_until(lambda: self.event_count(event) >= count, f"find {count} {event} event(s)") + async def async_wait_for_event(self, event, count): + """Wait for a number of events to be published, or fail.""" + await async_wait_until( + lambda: self.event_count(event) >= count, f"find {count} {event} event(s)" + ) + class CMAPListener(BaseListener, monitoring.ConnectionPoolListener): def connection_created(self, event): @@ -644,7 +650,10 @@ async def async_wait_until(predicate, success_description, timeout=10): start = time.time() interval = min(float(timeout) / 100, 0.1) while True: - retval = await predicate() + if iscoroutinefunction(predicate): + retval = await predicate() + else: + retval = predicate() if retval: return retval diff --git a/test/utils_spec_runner.py b/test/utils_spec_runner.py index 8b2679d776..4508502cd0 100644 --- a/test/utils_spec_runner.py +++ b/test/utils_spec_runner.py @@ -249,30 +249,22 @@ class SpecRunner(IntegrationTest): knobs: client_knobs listener: EventListener - @classmethod - def _setup_class(cls): - super()._setup_class() - cls.mongos_clients = [] + def setUp(self) -> None: + super().setUp() + self.mongos_clients = [] # Speed up the tests by decreasing the heartbeat frequency. - cls.knobs = client_knobs(heartbeat_frequency=0.1, min_heartbeat_interval=0.1) - cls.knobs.enable() - - @classmethod - def _tearDown_class(cls): - cls.knobs.disable() - for client in cls.mongos_clients: - client.close() - super()._tearDown_class() - - def setUp(self): - super().setUp() + self.knobs = client_knobs(heartbeat_frequency=0.1, min_heartbeat_interval=0.1) + self.knobs.enable() self.targets = {} self.listener = None # type: ignore self.pool_listener = None self.server_listener = None self.maxDiff = None + def tearDown(self) -> None: + self.knobs.disable() + def _set_fail_point(self, client, command_args): cmd = SON([("configureFailPoint", "failCommand")]) cmd.update(command_args) @@ -697,8 +689,6 @@ def run_scenario(self, scenario_def, test): self.listener = listener self.pool_listener = pool_listener self.server_listener = server_listener - # Close the client explicitly to avoid having too many threads open. - self.addCleanup(client.close) # Create session0 and session1. 
sessions = {} diff --git a/tools/synchro.py b/tools/synchro.py index 0a7109c6d4..47617365f4 100644 --- a/tools/synchro.py +++ b/tools/synchro.py @@ -110,6 +110,13 @@ "async_set_fail_point": "set_fail_point", "async_ensure_all_connected": "ensure_all_connected", "async_repl_set_step_down": "repl_set_step_down", + "AsyncPeriodicExecutor": "PeriodicExecutor", + "async_wait_for_event": "wait_for_event", + "pymongo_server_monitor_task": "pymongo_server_monitor_thread", + "pymongo_server_rtt_task": "pymongo_server_rtt_thread", + "_async_create_lock": "_create_lock", + "_async_create_condition": "_create_condition", + "_async_cond_wait": "_cond_wait", } docstring_replacements: dict[tuple[str, str], str] = { @@ -130,8 +137,6 @@ ".. warning:: This API is currently in beta, meaning the classes, methods, and behaviors described within may change before the full release." } -type_replacements = {"_Condition": "threading.Condition"} - import_replacements = {"test.synchronous": "test"} _pymongo_base = "./pymongo/asynchronous/" @@ -234,8 +239,6 @@ def process_files(files: list[str]) -> None: lines = translate_async_sleeps(lines) if file in docstring_translate_files: lines = translate_docstrings(lines) - translate_locks(lines) - translate_types(lines) if file in sync_test_files: translate_imports(lines) f.seek(0) @@ -269,34 +272,6 @@ def translate_coroutine_types(lines: list[str]) -> list[str]: return lines -def translate_locks(lines: list[str]) -> list[str]: - lock_lines = [line for line in lines if "_Lock(" in line] - cond_lines = [line for line in lines if "_Condition(" in line] - for line in lock_lines: - res = re.search(r"_Lock\(([^()]*\([^()]*\))\)", line) - if res: - old = res[0] - index = lines.index(line) - lines[index] = line.replace(old, res[1]) - for line in cond_lines: - res = re.search(r"_Condition\(([^()]*\([^()]*\))\)", line) - if res: - old = res[0] - index = lines.index(line) - lines[index] = line.replace(old, res[1]) - - return lines - - -def translate_types(lines: list[str]) -> list[str]: - for k, v in type_replacements.items(): - matches = [line for line in lines if k in line and "import" not in line] - for line in matches: - index = lines.index(line) - lines[index] = line.replace(k, v) - return lines - - def translate_imports(lines: list[str]) -> list[str]: for k, v in import_replacements.items(): matches = [line for line in lines if k in line and "import" in line] From cbeebd01901467a204687a70ca498dad70539502 Mon Sep 17 00:00:00 2001 From: theRealProHacker <77074862+theRealProHacker@users.noreply.github.com> Date: Mon, 2 Dec 2024 17:54:56 +0100 Subject: [PATCH 116/182] Small doc fix (#2021) Co-authored-by: Steven Silvester --- pymongo/asynchronous/cursor.py | 2 +- pymongo/synchronous/cursor.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pymongo/asynchronous/cursor.py b/pymongo/asynchronous/cursor.py index 7d7ae4a5db..8193e53282 100644 --- a/pymongo/asynchronous/cursor.py +++ b/pymongo/asynchronous/cursor.py @@ -1299,7 +1299,7 @@ async def to_list(self, length: Optional[int] = None) -> list[_DocumentType]: >>> await cursor.to_list() - Or, so read at most n items from the cursor:: + Or, to read at most n items from the cursor:: >>> await cursor.to_list(n) diff --git a/pymongo/synchronous/cursor.py b/pymongo/synchronous/cursor.py index 9a7637704f..b35098a327 100644 --- a/pymongo/synchronous/cursor.py +++ b/pymongo/synchronous/cursor.py @@ -1297,7 +1297,7 @@ def to_list(self, length: Optional[int] = None) -> list[_DocumentType]: >>> cursor.to_list() - Or, so 
read at most n items from the cursor:: + Or, to read at most n items from the cursor:: >>> cursor.to_list(n) From bc66598623c46e072518f6f11347096476b100c1 Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Mon, 2 Dec 2024 12:17:52 -0500 Subject: [PATCH 117/182] PYTHON-4965 - Consolidate startup and teardown tasks (#2017) --- .evergreen/combine-coverage.sh | 0 .evergreen/config.yml | 99 ++++++------------- .evergreen/hatch.sh | 0 .evergreen/install-dependencies.sh | 0 .evergreen/run-azurekms-fail-test.sh | 0 .evergreen/run-azurekms-test.sh | 0 .evergreen/run-deployed-lambda-aws-tests.sh | 0 .evergreen/run-gcpkms-test.sh | 0 .evergreen/run-perf-tests.sh | 0 .evergreen/scripts/archive-mongodb-logs.sh | 0 .../scripts/bootstrap-mongo-orchestration.sh | 0 .evergreen/scripts/check-import-time.sh | 0 .evergreen/scripts/cleanup.sh | 0 .evergreen/scripts/configure-env.sh | 0 .../scripts/download-and-merge-coverage.sh | 0 .evergreen/scripts/fix-absolute-paths.sh | 0 .evergreen/scripts/init-test-results.sh | 0 .evergreen/scripts/install-dependencies.sh | 0 .evergreen/scripts/make-files-executable.sh | 0 .evergreen/scripts/prepare-resources.sh | 0 .evergreen/scripts/run-atlas-tests.sh | 0 .evergreen/scripts/run-aws-ecs-auth-test.sh | 0 .evergreen/scripts/run-doctests.sh | 0 .../scripts/run-enterprise-auth-tests.sh | 0 .evergreen/scripts/run-gcpkms-fail-test.sh | 0 .evergreen/scripts/run-getdata.sh | 0 .evergreen/scripts/run-load-balancer.sh | 0 .evergreen/scripts/run-mockupdb-tests.sh | 0 .evergreen/scripts/run-mod-wsgi-tests.sh | 0 .evergreen/scripts/run-ocsp-test.sh | 0 .evergreen/scripts/run-perf-tests.sh | 0 .evergreen/scripts/run-tests.sh | 0 .evergreen/scripts/run-with-env.sh | 0 .evergreen/scripts/setup-encryption.sh | 0 .evergreen/scripts/setup-tests.sh | 0 .evergreen/scripts/stop-load-balancer.sh | 0 .evergreen/scripts/teardown-aws.sh | 7 -- .evergreen/scripts/teardown-docker.sh | 7 -- .evergreen/scripts/upload-coverage-report.sh | 0 .evergreen/scripts/windows-fix.sh | 0 .evergreen/setup-encryption.sh | 0 .evergreen/teardown-encryption.sh | 0 .pre-commit-config.yaml | 12 +++ tools/synchro.sh | 0 44 files changed, 44 insertions(+), 81 deletions(-) mode change 100644 => 100755 .evergreen/combine-coverage.sh mode change 100644 => 100755 .evergreen/hatch.sh mode change 100644 => 100755 .evergreen/install-dependencies.sh mode change 100644 => 100755 .evergreen/run-azurekms-fail-test.sh mode change 100644 => 100755 .evergreen/run-azurekms-test.sh mode change 100644 => 100755 .evergreen/run-deployed-lambda-aws-tests.sh mode change 100644 => 100755 .evergreen/run-gcpkms-test.sh mode change 100644 => 100755 .evergreen/run-perf-tests.sh mode change 100644 => 100755 .evergreen/scripts/archive-mongodb-logs.sh mode change 100644 => 100755 .evergreen/scripts/bootstrap-mongo-orchestration.sh mode change 100644 => 100755 .evergreen/scripts/check-import-time.sh mode change 100644 => 100755 .evergreen/scripts/cleanup.sh mode change 100644 => 100755 .evergreen/scripts/configure-env.sh mode change 100644 => 100755 .evergreen/scripts/download-and-merge-coverage.sh mode change 100644 => 100755 .evergreen/scripts/fix-absolute-paths.sh mode change 100644 => 100755 .evergreen/scripts/init-test-results.sh mode change 100644 => 100755 .evergreen/scripts/install-dependencies.sh mode change 100644 => 100755 .evergreen/scripts/make-files-executable.sh mode change 100644 => 100755 .evergreen/scripts/prepare-resources.sh mode change 100644 => 100755 .evergreen/scripts/run-atlas-tests.sh mode change 100644 => 100755 
.evergreen/scripts/run-aws-ecs-auth-test.sh mode change 100644 => 100755 .evergreen/scripts/run-doctests.sh mode change 100644 => 100755 .evergreen/scripts/run-enterprise-auth-tests.sh mode change 100644 => 100755 .evergreen/scripts/run-gcpkms-fail-test.sh mode change 100644 => 100755 .evergreen/scripts/run-getdata.sh mode change 100644 => 100755 .evergreen/scripts/run-load-balancer.sh mode change 100644 => 100755 .evergreen/scripts/run-mockupdb-tests.sh mode change 100644 => 100755 .evergreen/scripts/run-mod-wsgi-tests.sh mode change 100644 => 100755 .evergreen/scripts/run-ocsp-test.sh mode change 100644 => 100755 .evergreen/scripts/run-perf-tests.sh mode change 100644 => 100755 .evergreen/scripts/run-tests.sh mode change 100644 => 100755 .evergreen/scripts/run-with-env.sh mode change 100644 => 100755 .evergreen/scripts/setup-encryption.sh mode change 100644 => 100755 .evergreen/scripts/setup-tests.sh mode change 100644 => 100755 .evergreen/scripts/stop-load-balancer.sh delete mode 100644 .evergreen/scripts/teardown-aws.sh delete mode 100644 .evergreen/scripts/teardown-docker.sh mode change 100644 => 100755 .evergreen/scripts/upload-coverage-report.sh mode change 100644 => 100755 .evergreen/scripts/windows-fix.sh mode change 100644 => 100755 .evergreen/setup-encryption.sh mode change 100644 => 100755 .evergreen/teardown-encryption.sh mode change 100644 => 100755 tools/synchro.sh diff --git a/.evergreen/combine-coverage.sh b/.evergreen/combine-coverage.sh old mode 100644 new mode 100755 diff --git a/.evergreen/config.yml b/.evergreen/config.yml index 7ca3a72b1a..ac89270d84 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -37,6 +37,8 @@ functions: # Applies the subitted patch, if any # Deprecated. Should be removed. But still needed for certain agents (ZAP) - command: git.apply_patch + + "setup system": # Make an evergreen expansion file with dynamic values - command: subprocess.exec params: @@ -49,13 +51,19 @@ functions: - command: expansions.update params: file: src/expansion.yml - - "prepare resources": - command: subprocess.exec params: + include_expansions_in_env: ["PROJECT_DIRECTORY", "DRIVERS_TOOLS"] binary: bash args: - src/.evergreen/scripts/prepare-resources.sh + # Run drivers-evergreen-tools system setup + - command: subprocess.exec + params: + include_expansions_in_env: ["PROJECT_DIRECTORY", "DRIVERS_TOOLS"] + binary: bash + args: + - ${DRIVERS_TOOLS}/.evergreen/setup.sh "upload coverage" : - command: ec2.assume_role @@ -511,41 +519,32 @@ functions: - .evergreen/scripts/run-with-env.sh - .evergreen/scripts/cleanup.sh - "teardown": + "teardown system": - command: subprocess.exec params: binary: bash working_dir: "src" args: - - ${DRIVERS_TOOLS}/.evergreen/teardown.sh - - "fix absolute paths": + # Ensure the instance profile is reassigned for aws tests. 
+ - ${DRIVERS_TOOLS}/.evergreen/auth_aws/teardown.sh - command: subprocess.exec params: binary: bash + working_dir: "src" args: - - src/.evergreen/scripts/fix-absolute-paths.sh - - "windows fix": + - ${DRIVERS_TOOLS}/.evergreen/csfle/teardown.sh - command: subprocess.exec params: binary: bash + working_dir: "src" args: - - src/.evergreen/scripts/windows-fix.sh - - "make files executable": - - command: subprocess.exec - params: - binary: bash - args: - - src/.evergreen/scripts/make-files-executable.sh - - "init test-results": + - ${DRIVERS_TOOLS}/.evergreen/ocsp/teardown.sh - command: subprocess.exec params: binary: bash + working_dir: "src" args: - - src/.evergreen/scripts/init-test-results.sh + - ${DRIVERS_TOOLS}/.evergreen/teardown.sh "install dependencies": - command: subprocess.exec @@ -621,21 +620,6 @@ functions: args: - src/.evergreen/scripts/stop-load-balancer.sh - "teardown_docker": - - command: subprocess.exec - params: - binary: bash - args: - - src/.evergreen/scripts/teardown-docker.sh - - "teardown_aws": - - command: subprocess.exec - params: - binary: bash - args: - - src/.evergreen/scripts/run-with-env.sh - - src/.evergreen/scripts/teardown-aws.sh - "teardown atlas": - command: subprocess.exec params: @@ -665,25 +649,19 @@ functions: pre: - func: "fetch source" - - func: "prepare resources" - - func: "windows fix" - - func: "fix absolute paths" - - func: "init test-results" - - func: "make files executable" + - func: "setup system" - func: "install dependencies" - func: "assume ec2 role" post: # Disabled, causing timeouts # - func: "upload working dir" - - func: "teardown" + - func: "teardown system" - func: "upload coverage" - func: "upload mo artifacts" - func: "upload test results" - func: "stop mongo-orchestration" - - func: "teardown_aws" - func: "cleanup" - - func: "teardown_docker" task_groups: - name: serverless_task_group @@ -691,7 +669,7 @@ task_groups: setup_group_timeout_secs: 1800 # 30 minutes setup_group: - func: "fetch source" - - func: "prepare resources" + - func: "setup system" - command: subprocess.exec params: binary: bash @@ -714,9 +692,7 @@ task_groups: setup_group_timeout_secs: 1800 # 30 minutes setup_group: - func: fetch source - - func: prepare resources - - func: fix absolute paths - - func: make files executable + - func: setup system - command: subprocess.exec params: binary: bash @@ -735,9 +711,7 @@ task_groups: - name: testazurekms_task_group setup_group: - func: fetch source - - func: prepare resources - - func: fix absolute paths - - func: make files executable + - func: setup system - command: subprocess.exec params: binary: bash @@ -761,9 +735,7 @@ task_groups: - name: testazureoidc_task_group setup_group: - func: fetch source - - func: prepare resources - - func: fix absolute paths - - func: make files executable + - func: setup system - command: subprocess.exec params: binary: bash @@ -785,9 +757,7 @@ task_groups: - name: testgcpoidc_task_group setup_group: - func: fetch source - - func: prepare resources - - func: fix absolute paths - - func: make files executable + - func: setup system - command: subprocess.exec params: binary: bash @@ -809,9 +779,7 @@ task_groups: - name: testk8soidc_task_group setup_group: - func: fetch source - - func: prepare resources - - func: fix absolute paths - - func: make files executable + - func: setup system - command: ec2.assume_role params: role_arn: ${aws_test_secrets_role} @@ -835,9 +803,7 @@ task_groups: - name: testoidc_task_group setup_group: - func: fetch source - - func: prepare resources - - func: 
fix absolute paths - - func: make files executable + - func: setup system - func: "assume ec2 role" - command: subprocess.exec params: @@ -859,7 +825,7 @@ task_groups: - name: test_aws_lambda_task_group setup_group: - func: fetch source - - func: prepare resources + - func: setup system - func: setup atlas teardown_task: - func: teardown atlas @@ -871,9 +837,7 @@ task_groups: - name: test_atlas_task_group_search_indexes setup_group: - func: fetch source - - func: prepare resources - - func: fix absolute paths - - func: make files executable + - func: setup system - func: setup atlas teardown_task: - func: teardown atlas @@ -1584,7 +1548,7 @@ tasks: - name: testazurekms-fail-task commands: - func: fetch source - - func: make files executable + - func: setup system - func: "bootstrap mongo-orchestration" vars: VERSION: "latest" @@ -1640,6 +1604,7 @@ tasks: params: binary: bash working_dir: src + include_expansions_in_env: ["PYTHON_BINARY"] args: - .evergreen/scripts/check-import-time.sh - ${revision} diff --git a/.evergreen/hatch.sh b/.evergreen/hatch.sh old mode 100644 new mode 100755 diff --git a/.evergreen/install-dependencies.sh b/.evergreen/install-dependencies.sh old mode 100644 new mode 100755 diff --git a/.evergreen/run-azurekms-fail-test.sh b/.evergreen/run-azurekms-fail-test.sh old mode 100644 new mode 100755 diff --git a/.evergreen/run-azurekms-test.sh b/.evergreen/run-azurekms-test.sh old mode 100644 new mode 100755 diff --git a/.evergreen/run-deployed-lambda-aws-tests.sh b/.evergreen/run-deployed-lambda-aws-tests.sh old mode 100644 new mode 100755 diff --git a/.evergreen/run-gcpkms-test.sh b/.evergreen/run-gcpkms-test.sh old mode 100644 new mode 100755 diff --git a/.evergreen/run-perf-tests.sh b/.evergreen/run-perf-tests.sh old mode 100644 new mode 100755 diff --git a/.evergreen/scripts/archive-mongodb-logs.sh b/.evergreen/scripts/archive-mongodb-logs.sh old mode 100644 new mode 100755 diff --git a/.evergreen/scripts/bootstrap-mongo-orchestration.sh b/.evergreen/scripts/bootstrap-mongo-orchestration.sh old mode 100644 new mode 100755 diff --git a/.evergreen/scripts/check-import-time.sh b/.evergreen/scripts/check-import-time.sh old mode 100644 new mode 100755 diff --git a/.evergreen/scripts/cleanup.sh b/.evergreen/scripts/cleanup.sh old mode 100644 new mode 100755 diff --git a/.evergreen/scripts/configure-env.sh b/.evergreen/scripts/configure-env.sh old mode 100644 new mode 100755 diff --git a/.evergreen/scripts/download-and-merge-coverage.sh b/.evergreen/scripts/download-and-merge-coverage.sh old mode 100644 new mode 100755 diff --git a/.evergreen/scripts/fix-absolute-paths.sh b/.evergreen/scripts/fix-absolute-paths.sh old mode 100644 new mode 100755 diff --git a/.evergreen/scripts/init-test-results.sh b/.evergreen/scripts/init-test-results.sh old mode 100644 new mode 100755 diff --git a/.evergreen/scripts/install-dependencies.sh b/.evergreen/scripts/install-dependencies.sh old mode 100644 new mode 100755 diff --git a/.evergreen/scripts/make-files-executable.sh b/.evergreen/scripts/make-files-executable.sh old mode 100644 new mode 100755 diff --git a/.evergreen/scripts/prepare-resources.sh b/.evergreen/scripts/prepare-resources.sh old mode 100644 new mode 100755 diff --git a/.evergreen/scripts/run-atlas-tests.sh b/.evergreen/scripts/run-atlas-tests.sh old mode 100644 new mode 100755 diff --git a/.evergreen/scripts/run-aws-ecs-auth-test.sh b/.evergreen/scripts/run-aws-ecs-auth-test.sh old mode 100644 new mode 100755 diff --git a/.evergreen/scripts/run-doctests.sh 
b/.evergreen/scripts/run-doctests.sh old mode 100644 new mode 100755 diff --git a/.evergreen/scripts/run-enterprise-auth-tests.sh b/.evergreen/scripts/run-enterprise-auth-tests.sh old mode 100644 new mode 100755 diff --git a/.evergreen/scripts/run-gcpkms-fail-test.sh b/.evergreen/scripts/run-gcpkms-fail-test.sh old mode 100644 new mode 100755 diff --git a/.evergreen/scripts/run-getdata.sh b/.evergreen/scripts/run-getdata.sh old mode 100644 new mode 100755 diff --git a/.evergreen/scripts/run-load-balancer.sh b/.evergreen/scripts/run-load-balancer.sh old mode 100644 new mode 100755 diff --git a/.evergreen/scripts/run-mockupdb-tests.sh b/.evergreen/scripts/run-mockupdb-tests.sh old mode 100644 new mode 100755 diff --git a/.evergreen/scripts/run-mod-wsgi-tests.sh b/.evergreen/scripts/run-mod-wsgi-tests.sh old mode 100644 new mode 100755 diff --git a/.evergreen/scripts/run-ocsp-test.sh b/.evergreen/scripts/run-ocsp-test.sh old mode 100644 new mode 100755 diff --git a/.evergreen/scripts/run-perf-tests.sh b/.evergreen/scripts/run-perf-tests.sh old mode 100644 new mode 100755 diff --git a/.evergreen/scripts/run-tests.sh b/.evergreen/scripts/run-tests.sh old mode 100644 new mode 100755 diff --git a/.evergreen/scripts/run-with-env.sh b/.evergreen/scripts/run-with-env.sh old mode 100644 new mode 100755 diff --git a/.evergreen/scripts/setup-encryption.sh b/.evergreen/scripts/setup-encryption.sh old mode 100644 new mode 100755 diff --git a/.evergreen/scripts/setup-tests.sh b/.evergreen/scripts/setup-tests.sh old mode 100644 new mode 100755 diff --git a/.evergreen/scripts/stop-load-balancer.sh b/.evergreen/scripts/stop-load-balancer.sh old mode 100644 new mode 100755 diff --git a/.evergreen/scripts/teardown-aws.sh b/.evergreen/scripts/teardown-aws.sh deleted file mode 100644 index 634d1e5724..0000000000 --- a/.evergreen/scripts/teardown-aws.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash - -cd "${DRIVERS_TOOLS}/.evergreen/auth_aws" || exit -if [ -f "./aws_e2e_setup.json" ]; then - . 
./activate-authawsvenv.sh - python ./lib/aws_assign_instance_profile.py -fi diff --git a/.evergreen/scripts/teardown-docker.sh b/.evergreen/scripts/teardown-docker.sh deleted file mode 100644 index 733779d058..0000000000 --- a/.evergreen/scripts/teardown-docker.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash - -# Remove all Docker images -DOCKER=$(command -v docker) || true -if [ -n "$DOCKER" ]; then - docker rmi -f "$(docker images -a -q)" &> /dev/null || true -fi diff --git a/.evergreen/scripts/upload-coverage-report.sh b/.evergreen/scripts/upload-coverage-report.sh old mode 100644 new mode 100755 diff --git a/.evergreen/scripts/windows-fix.sh b/.evergreen/scripts/windows-fix.sh old mode 100644 new mode 100755 diff --git a/.evergreen/setup-encryption.sh b/.evergreen/setup-encryption.sh old mode 100644 new mode 100755 diff --git a/.evergreen/teardown-encryption.sh b/.evergreen/teardown-encryption.sh old mode 100644 new mode 100755 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6e2b497e59..4f6759bc5a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -102,3 +102,15 @@ repos: # - test/versioned-api/crud-api-version-1-strict.json:514: nin ==> inn, min, bin, nine # - test/test_client.py:188: te ==> the, be, we, to args: ["-L", "fle,fo,infinit,isnt,nin,te,aks"] + +- repo: local + hooks: + - id: executable-shell + name: executable-shell + entry: chmod +x + language: system + types: [shell] + exclude: | + (?x)( + .evergreen/retry-with-backoff.sh + ) diff --git a/tools/synchro.sh b/tools/synchro.sh old mode 100644 new mode 100755 From 0f61ebb1150266a8cc9df70b87c4175b5f23aead Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Mon, 2 Dec 2024 12:35:31 -0500 Subject: [PATCH 118/182] PYTHON-4995 - Skip TestNoSessionsSupport tests on crypt_shared (#2022) --- test/asynchronous/test_encryption.py | 2 +- test/test_encryption.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/test/asynchronous/test_encryption.py b/test/asynchronous/test_encryption.py index 048db2d501..21cd5e2666 100644 --- a/test/asynchronous/test_encryption.py +++ b/test/asynchronous/test_encryption.py @@ -3069,11 +3069,11 @@ def start_mongocryptd(port) -> None: _spawn_daemon(args) +@unittest.skipIf(os.environ.get("TEST_CRYPT_SHARED"), "crypt_shared lib is installed") class TestNoSessionsSupport(AsyncEncryptionIntegrationTest): mongocryptd_client: AsyncMongoClient MONGOCRYPTD_PORT = 27020 - @unittest.skipIf(os.environ.get("TEST_CRYPT_SHARED"), "crypt_shared lib is installed") async def asyncSetUp(self) -> None: await super().asyncSetUp() start_mongocryptd(self.MONGOCRYPTD_PORT) diff --git a/test/test_encryption.py b/test/test_encryption.py index cb8bcb74d6..18e21fe6a7 100644 --- a/test/test_encryption.py +++ b/test/test_encryption.py @@ -3051,11 +3051,11 @@ def start_mongocryptd(port) -> None: _spawn_daemon(args) +@unittest.skipIf(os.environ.get("TEST_CRYPT_SHARED"), "crypt_shared lib is installed") class TestNoSessionsSupport(EncryptionIntegrationTest): mongocryptd_client: MongoClient MONGOCRYPTD_PORT = 27020 - @unittest.skipIf(os.environ.get("TEST_CRYPT_SHARED"), "crypt_shared lib is installed") def setUp(self) -> None: super().setUp() start_mongocryptd(self.MONGOCRYPTD_PORT) From a9e61f6bed71dbf9a26d46d18d9905a6a0ccdd16 Mon Sep 17 00:00:00 2001 From: Shane Harvey Date: Mon, 2 Dec 2024 10:08:52 -0800 Subject: [PATCH 119/182] PYTHON-4292 Improve TLS read performance (#2020) --- pymongo/network_layer.py | 91 +++++++++++++++++----------------------- 1 file changed, 38 insertions(+), 
53 deletions(-) diff --git a/pymongo/network_layer.py b/pymongo/network_layer.py index 6ab6db2f7d..beffba6d18 100644 --- a/pymongo/network_layer.py +++ b/pymongo/network_layer.py @@ -28,7 +28,7 @@ Union, ) -from pymongo import _csot, ssl_support +from pymongo import ssl_support from pymongo._asyncio_task import create_task from pymongo.errors import _OperationCancelled from pymongo.socket_checker import _errno_from_exception @@ -316,62 +316,47 @@ async def _async_receive(conn: socket.socket, length: int, loop: AbstractEventLo return mv -# Sync version: -def wait_for_read(conn: Connection, deadline: Optional[float]) -> None: - """Block until at least one byte is read, or a timeout, or a cancel.""" - sock = conn.conn - timed_out = False - # Check if the connection's socket has been manually closed - if sock.fileno() == -1: - return - while True: - # SSLSocket can have buffered data which won't be caught by select. - if hasattr(sock, "pending") and sock.pending() > 0: - readable = True - else: - # Wait up to 500ms for the socket to become readable and then - # check for cancellation. - if deadline: - remaining = deadline - time.monotonic() - # When the timeout has expired perform one final check to - # see if the socket is readable. This helps avoid spurious - # timeouts on AWS Lambda and other FaaS environments. - if remaining <= 0: - timed_out = True - timeout = max(min(remaining, _POLL_TIMEOUT), 0) - else: - timeout = _POLL_TIMEOUT - readable = conn.socket_checker.select(sock, read=True, timeout=timeout) - if conn.cancel_context.cancelled: - raise _OperationCancelled("operation cancelled") - if readable: - return - if timed_out: - raise socket.timeout("timed out") - - def receive_data(conn: Connection, length: int, deadline: Optional[float]) -> memoryview: buf = bytearray(length) mv = memoryview(buf) bytes_read = 0 - while bytes_read < length: - try: - wait_for_read(conn, deadline) - # CSOT: Update timeout. When the timeout has expired perform one - # final non-blocking recv. This helps avoid spurious timeouts when - # the response is actually already buffered on the client. - if _csot.get_timeout() and deadline is not None: - conn.set_conn_timeout(max(deadline - time.monotonic(), 0)) - chunk_length = conn.conn.recv_into(mv[bytes_read:]) - except BLOCKING_IO_ERRORS: - raise socket.timeout("timed out") from None - except OSError as exc: - if _errno_from_exception(exc) == errno.EINTR: + # To support cancelling a network read, we shorten the socket timeout and + # check for the cancellation signal after each timeout. Alternatively we + # could close the socket but that does not reliably cancel recv() calls + # on all OSes. + orig_timeout = conn.conn.gettimeout() + try: + while bytes_read < length: + if deadline is not None: + # CSOT: Update timeout. When the timeout has expired perform one + # final non-blocking recv. This helps avoid spurious timeouts when + # the response is actually already buffered on the client. + short_timeout = min(max(deadline - time.monotonic(), 0), _POLL_TIMEOUT) + else: + short_timeout = _POLL_TIMEOUT + conn.set_conn_timeout(short_timeout) + try: + chunk_length = conn.conn.recv_into(mv[bytes_read:]) + except BLOCKING_IO_ERRORS: + if conn.cancel_context.cancelled: + raise _OperationCancelled("operation cancelled") from None + # We reached the true deadline. 
+ raise socket.timeout("timed out") from None + except socket.timeout: + if conn.cancel_context.cancelled: + raise _OperationCancelled("operation cancelled") from None continue - raise - if chunk_length == 0: - raise OSError("connection closed") - - bytes_read += chunk_length + except OSError as exc: + if conn.cancel_context.cancelled: + raise _OperationCancelled("operation cancelled") from None + if _errno_from_exception(exc) == errno.EINTR: + continue + raise + if chunk_length == 0: + raise OSError("connection closed") + + bytes_read += chunk_length + finally: + conn.set_conn_timeout(orig_timeout) return mv From f45b35a478f5f8a9c07b8e029869db5799113576 Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Mon, 2 Dec 2024 14:08:22 -0500 Subject: [PATCH 120/182] PYTHON-4996 - Ensure all async integration tests call their parent asyncSetup method (#2023) --- test/asynchronous/test_client_bulk_write.py | 2 ++ .../test_connections_survive_primary_stepdown_spec.py | 1 + test/test_client_bulk_write.py | 2 ++ test/test_connections_survive_primary_stepdown_spec.py | 1 + test/test_gridfs.py | 1 + test/test_gridfs_bucket.py | 1 + 6 files changed, 8 insertions(+) diff --git a/test/asynchronous/test_client_bulk_write.py b/test/asynchronous/test_client_bulk_write.py index 01294402de..a82629f495 100644 --- a/test/asynchronous/test_client_bulk_write.py +++ b/test/asynchronous/test_client_bulk_write.py @@ -102,6 +102,7 @@ async def test_raw_bson_not_inflated(self): # https://github.com/mongodb/specifications/tree/master/source/crud/tests class TestClientBulkWriteCRUD(AsyncIntegrationTest): async def asyncSetUp(self): + await super().asyncSetUp() self.max_write_batch_size = await async_client_context.max_write_batch_size self.max_bson_object_size = await async_client_context.max_bson_size self.max_message_size_bytes = await async_client_context.max_message_size_bytes @@ -652,6 +653,7 @@ class TestClientBulkWriteCSOT(AsyncIntegrationTest): async def asyncSetUp(self): if os.environ.get("SKIP_CSOT_TESTS", ""): raise unittest.SkipTest("SKIP_CSOT_TESTS is set, skipping...") + await super().asyncSetUp() self.max_write_batch_size = await async_client_context.max_write_batch_size self.max_bson_object_size = await async_client_context.max_bson_size self.max_message_size_bytes = await async_client_context.max_message_size_bytes diff --git a/test/asynchronous/test_connections_survive_primary_stepdown_spec.py b/test/asynchronous/test_connections_survive_primary_stepdown_spec.py index bc9638b443..4795d3937a 100644 --- a/test/asynchronous/test_connections_survive_primary_stepdown_spec.py +++ b/test/asynchronous/test_connections_survive_primary_stepdown_spec.py @@ -46,6 +46,7 @@ class TestAsyncConnectionsSurvivePrimaryStepDown(AsyncIntegrationTest): @async_client_context.require_replica_set async def asyncSetUp(self): + await super().asyncSetUp() self.listener = CMAPListener() self.client = await self.async_rs_or_single_client( event_listeners=[self.listener], retryWrites=False, heartbeatFrequencyMS=500 diff --git a/test/test_client_bulk_write.py b/test/test_client_bulk_write.py index f06c07d588..c1cc27c28a 100644 --- a/test/test_client_bulk_write.py +++ b/test/test_client_bulk_write.py @@ -102,6 +102,7 @@ def test_raw_bson_not_inflated(self): # https://github.com/mongodb/specifications/tree/master/source/crud/tests class TestClientBulkWriteCRUD(IntegrationTest): def setUp(self): + super().setUp() self.max_write_batch_size = client_context.max_write_batch_size self.max_bson_object_size = client_context.max_bson_size 
self.max_message_size_bytes = client_context.max_message_size_bytes @@ -648,6 +649,7 @@ class TestClientBulkWriteCSOT(IntegrationTest): def setUp(self): if os.environ.get("SKIP_CSOT_TESTS", ""): raise unittest.SkipTest("SKIP_CSOT_TESTS is set, skipping...") + super().setUp() self.max_write_batch_size = client_context.max_write_batch_size self.max_bson_object_size = client_context.max_bson_size self.max_message_size_bytes = client_context.max_message_size_bytes diff --git a/test/test_connections_survive_primary_stepdown_spec.py b/test/test_connections_survive_primary_stepdown_spec.py index 84ef6decd5..1fb08cbed5 100644 --- a/test/test_connections_survive_primary_stepdown_spec.py +++ b/test/test_connections_survive_primary_stepdown_spec.py @@ -46,6 +46,7 @@ class TestConnectionsSurvivePrimaryStepDown(IntegrationTest): @client_context.require_replica_set def setUp(self): + super().setUp() self.listener = CMAPListener() self.client = self.rs_or_single_client( event_listeners=[self.listener], retryWrites=False, heartbeatFrequencyMS=500 diff --git a/test/test_gridfs.py b/test/test_gridfs.py index a36109f399..ab8950250b 100644 --- a/test/test_gridfs.py +++ b/test/test_gridfs.py @@ -511,6 +511,7 @@ def setUp(self): super().setUp() @classmethod + @client_context.require_connection def tearDownClass(cls): client_context.client.drop_database("gfsreplica") diff --git a/test/test_gridfs_bucket.py b/test/test_gridfs_bucket.py index 04c7427350..0af4dce811 100644 --- a/test/test_gridfs_bucket.py +++ b/test/test_gridfs_bucket.py @@ -481,6 +481,7 @@ def setUp(self): super().setUp() @classmethod + @client_context.require_connection def tearDownClass(cls): client_context.client.drop_database("gfsbucketreplica") From fdcbe2e62237623c20b23f740bc66894532d475a Mon Sep 17 00:00:00 2001 From: Navjot Date: Tue, 3 Dec 2024 14:22:06 +0000 Subject: [PATCH 121/182] PYTHON-1982 Update Invalid Document error message to include doc (#1854) Co-authored-by: Navjot Singh Co-authored-by: Navjot Singh Co-authored-by: Steven Silvester --- bson/__init__.py | 5 ++++- bson/_cbsonmodule.c | 35 +++++++++++++++++++++++++++++++++++ doc/contributors.rst | 1 + test/test_bson.py | 13 +++++++++++++ 4 files changed, 53 insertions(+), 1 deletion(-) diff --git a/bson/__init__.py b/bson/__init__.py index e866a99c8d..fc6efe0d59 100644 --- a/bson/__init__.py +++ b/bson/__init__.py @@ -1006,7 +1006,10 @@ def _dict_to_bson( elements.append(_name_value_to_bson(b"_id\x00", doc["_id"], check_keys, opts)) for key, value in doc.items(): if not top_level or key != "_id": - elements.append(_element_to_bson(key, value, check_keys, opts)) + try: + elements.append(_element_to_bson(key, value, check_keys, opts)) + except InvalidDocument as err: + raise InvalidDocument(f"Invalid document {doc} | {err}") from err except AttributeError: raise TypeError(f"encoder expected a mapping type but got: {doc!r}") from None diff --git a/bson/_cbsonmodule.c b/bson/_cbsonmodule.c index a66071c285..d91c7e0536 100644 --- a/bson/_cbsonmodule.c +++ b/bson/_cbsonmodule.c @@ -1743,6 +1743,41 @@ int write_dict(PyObject* self, buffer_t buffer, while (PyDict_Next(dict, &pos, &key, &value)) { if (!decode_and_write_pair(self, buffer, key, value, check_keys, options, top_level)) { + if (PyErr_Occurred()) { + PyObject *etype = NULL, *evalue = NULL, *etrace = NULL; + PyErr_Fetch(&etype, &evalue, &etrace); + PyObject *InvalidDocument = _error("InvalidDocument"); + + if (top_level && InvalidDocument && PyErr_GivenExceptionMatches(etype, InvalidDocument)) { + + Py_DECREF(etype); + etype = 
InvalidDocument; + + if (evalue) { + PyObject *msg = PyObject_Str(evalue); + Py_DECREF(evalue); + + if (msg) { + // Prepend doc to the existing message + PyObject *dict_str = PyObject_Str(dict); + PyObject *new_msg = PyUnicode_FromFormat("Invalid document %s | %s", PyUnicode_AsUTF8(dict_str), PyUnicode_AsUTF8(msg)); + Py_DECREF(dict_str); + + if (new_msg) { + evalue = new_msg; + } + else { + evalue = msg; + } + } + } + PyErr_NormalizeException(&etype, &evalue, &etrace); + } + else { + Py_DECREF(InvalidDocument); + } + PyErr_Restore(etype, evalue, etrace); + } return 0; } } diff --git a/doc/contributors.rst b/doc/contributors.rst index 272b81d6ae..4a7f5424b1 100644 --- a/doc/contributors.rst +++ b/doc/contributors.rst @@ -102,3 +102,4 @@ The following is a list of people who have contributed to - Ivan Lukyanchikov (ilukyanchikov) - Terry Patterson - Romain Morotti +- Navjot Singh (navjots18) diff --git a/test/test_bson.py b/test/test_bson.py index b431f700dc..e550b538d3 100644 --- a/test/test_bson.py +++ b/test/test_bson.py @@ -1099,6 +1099,19 @@ def __repr__(self): ): encode({"t": Wrapper(1)}) + def test_doc_in_invalid_document_error_message(self): + class Wrapper: + def __init__(self, val): + self.val = val + + def __repr__(self): + return repr(self.val) + + self.assertEqual("1", repr(Wrapper(1))) + doc = {"t": Wrapper(1)} + with self.assertRaisesRegex(InvalidDocument, f"Invalid document {doc}"): + encode(doc) + class TestCodecOptions(unittest.TestCase): def test_document_class(self): From ce1c49a668a2dce46c8faafaa3f4be9adfaf4b90 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Tue, 3 Dec 2024 14:29:23 -0600 Subject: [PATCH 122/182] PYTHON-4646 Improve usage of hatch in evergreen (#2025) --- .evergreen/config.yml | 2 +- .evergreen/generated_configs/variants.yml | 9 --- .evergreen/hatch.sh | 46 +--------------- .evergreen/install-dependencies.sh | 3 + .evergreen/run-mongodb-aws-ecs-test.sh | 6 +- .evergreen/scripts/configure-env.sh | 4 +- .evergreen/scripts/ensure-hatch.sh | 55 +++++++++++++++++++ .evergreen/scripts/generate_config.py | 7 +-- .../scripts/run-enterprise-auth-tests.sh | 3 +- .evergreen/scripts/run-tests.sh | 1 - .gitignore | 2 + 11 files changed, 74 insertions(+), 64 deletions(-) create mode 100755 .evergreen/scripts/ensure-hatch.sh diff --git a/.evergreen/config.yml b/.evergreen/config.yml index ac89270d84..5c0e2983ea 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -42,7 +42,7 @@ functions: # Make an evergreen expansion file with dynamic values - command: subprocess.exec params: - include_expansions_in_env: ["is_patch", "project", "version_id", "AUTH", "SSL", "test_encryption", "test_encryption_pyopenssl", "test_crypt_shared", "test_pyopenssl", "SETDEFAULTENCODING", "test_loadbalancer", "test_serverless", "SKIP_CSOT_TESTS", "MONGODB_STARTED", "DISABLE_TEST_COMMANDS", "GREEN_FRAMEWORK", "NO_EXT", "COVERAGE", "COMPRESSORS", "TEST_SUITES", "MONGODB_API_VERSION", "SKIP_HATCH", "skip_crypt_shared", "VERSION", "TOPOLOGY", "STORAGE_ENGINE", "ORCHESTRATION_FILE", "REQUIRE_API_VERSION", "LOAD_BALANCER", "skip_web_identity_auth_test", "skip_ECS_auth_test"] + include_expansions_in_env: ["is_patch", "project", "version_id", "AUTH", "SSL", "test_encryption", "test_encryption_pyopenssl", "test_crypt_shared", "test_pyopenssl", "SETDEFAULTENCODING", "test_loadbalancer", "test_serverless", "SKIP_CSOT_TESTS", "MONGODB_STARTED", "DISABLE_TEST_COMMANDS", "GREEN_FRAMEWORK", "NO_EXT", "COVERAGE", "COMPRESSORS", "TEST_SUITES", "MONGODB_API_VERSION", "skip_crypt_shared", 
"VERSION", "TOPOLOGY", "STORAGE_ENGINE", "ORCHESTRATION_FILE", "REQUIRE_API_VERSION", "LOAD_BALANCER", "skip_web_identity_auth_test", "skip_ECS_auth_test"] binary: bash working_dir: "src" args: diff --git a/.evergreen/generated_configs/variants.yml b/.evergreen/generated_configs/variants.yml index 928347f567..226f4238f2 100644 --- a/.evergreen/generated_configs/variants.yml +++ b/.evergreen/generated_configs/variants.yml @@ -8,7 +8,6 @@ buildvariants: - rhel79-small batchtime: 10080 expansions: - SKIP_HATCH: "true" PYTHON_BINARY: /opt/python/3.9/bin/python3 - name: other-hosts-rhel9-fips tasks: @@ -17,8 +16,6 @@ buildvariants: run_on: - rhel92-fips batchtime: 10080 - expansions: - SKIP_HATCH: "true" - name: other-hosts-rhel8-zseries tasks: - name: .6.0 .standalone !.sync_async @@ -26,8 +23,6 @@ buildvariants: run_on: - rhel8-zseries-small batchtime: 10080 - expansions: - SKIP_HATCH: "true" - name: other-hosts-rhel8-power8 tasks: - name: .6.0 .standalone !.sync_async @@ -35,8 +30,6 @@ buildvariants: run_on: - rhel8-power-small batchtime: 10080 - expansions: - SKIP_HATCH: "true" - name: other-hosts-rhel8-arm64 tasks: - name: .6.0 .standalone !.sync_async @@ -44,8 +37,6 @@ buildvariants: run_on: - rhel82-arm64-small batchtime: 10080 - expansions: - SKIP_HATCH: "true" # Atlas connect tests - name: atlas-connect-rhel8-python3.9 diff --git a/.evergreen/hatch.sh b/.evergreen/hatch.sh index 98cd9ed734..c01dfcd19e 100755 --- a/.evergreen/hatch.sh +++ b/.evergreen/hatch.sh @@ -1,45 +1,5 @@ #!/bin/bash -set -o errexit # Exit the script with error if any of the commands fail -set -x +set -eu -. .evergreen/utils.sh - -if [ -z "$PYTHON_BINARY" ]; then - PYTHON_BINARY=$(find_python3) -fi - -# Check if we should skip hatch and run the tests directly. -if [ -n "$SKIP_HATCH" ]; then - ENV_NAME=testenv-$RANDOM - createvirtualenv "$PYTHON_BINARY" $ENV_NAME - # shellcheck disable=SC2064 - trap "deactivate; rm -rf $ENV_NAME" EXIT HUP - python -m pip install -e ".[test]" - run_hatch() { - bash ./.evergreen/run-tests.sh - } -else # Set up virtualenv before installing hatch - # Use a random venv name because the encryption tasks run this script multiple times in the same run. - ENV_NAME=hatchenv-$RANDOM - createvirtualenv "$PYTHON_BINARY" $ENV_NAME - # shellcheck disable=SC2064 - trap "deactivate; rm -rf $ENV_NAME" EXIT HUP - python -m pip install -q hatch - - # Ensure hatch does not write to user or global locations. - touch hatch_config.toml - HATCH_CONFIG=$(pwd)/hatch_config.toml - if [ "Windows_NT" = "${OS:-}" ]; then # Magic variable in cygwin - HATCH_CONFIG=$(cygpath -m "$HATCH_CONFIG") - fi - export HATCH_CONFIG - hatch config restore - hatch config set dirs.data "$(pwd)/.hatch/data" - hatch config set dirs.cache "$(pwd)/.hatch/cache" - - run_hatch() { - python -m hatch run "$@" - } -fi - -run_hatch "${@:1}" +. .evergreen/scripts/ensure-hatch.sh +hatch run "$@" diff --git a/.evergreen/install-dependencies.sh b/.evergreen/install-dependencies.sh index 9f4bcdbb59..4c0541a4e2 100755 --- a/.evergreen/install-dependencies.sh +++ b/.evergreen/install-dependencies.sh @@ -8,6 +8,9 @@ cp ${PROJECT_DIRECTORY}/test/certificates/* ${DRIVERS_TOOLS}/.evergreen/x509gen/ # Replace MongoOrchestration's client certificate. cp ${PROJECT_DIRECTORY}/test/certificates/client.pem ${MONGO_ORCHESTRATION_HOME}/lib/client.pem +# Ensure hatch is installed. 
+bash ${PROJECT_DIRECTORY}/scripts/ensure-hatch.sh + if [ -w /etc/hosts ]; then SUDO="" else diff --git a/.evergreen/run-mongodb-aws-ecs-test.sh b/.evergreen/run-mongodb-aws-ecs-test.sh index 3905a08764..3189a6cc6c 100755 --- a/.evergreen/run-mongodb-aws-ecs-test.sh +++ b/.evergreen/run-mongodb-aws-ecs-test.sh @@ -22,13 +22,13 @@ set -o xtrace # Install python with pip. PYTHON_VER="python3.9" -apt-get update -apt-get install $PYTHON_VER python3-pip build-essential $PYTHON_VER-dev -y +apt-get -qq update < /dev/null > /dev/null +apt-get -qq install $PYTHON_VER $PYTHON_VER-venv build-essential $PYTHON_VER-dev -y < /dev/null > /dev/null export PYTHON_BINARY=$PYTHON_VER export TEST_AUTH_AWS=1 export AUTH="auth" export SET_XTRACE_ON=1 cd src -$PYTHON_BINARY -m pip install -q --user hatch +rm -rf .venv bash .evergreen/hatch.sh test:test-eg diff --git a/.evergreen/scripts/configure-env.sh b/.evergreen/scripts/configure-env.sh index 3c0a0436de..313f4c3c92 100755 --- a/.evergreen/scripts/configure-env.sh +++ b/.evergreen/scripts/configure-env.sh @@ -11,11 +11,13 @@ fi PROJECT_DIRECTORY="$(pwd)" DRIVERS_TOOLS="$(dirname $PROJECT_DIRECTORY)/drivers-tools" +CARGO_HOME=${CARGO_HOME:-${DRIVERS_TOOLS}/.cargo} # Python has cygwin path problems on Windows. Detect prospective mongo-orchestration home directory if [ "Windows_NT" = "$OS" ]; then # Magic variable in cygwin DRIVERS_TOOLS=$(cygpath -m $DRIVERS_TOOLS) PROJECT_DIRECTORY=$(cygpath -m $PROJECT_DIRECTORY) + CARGO_HOME=$(cygpath -m $CARGO_HOME) fi SCRIPT_DIR="$PROJECT_DIRECTORY/.evergreen/scripts" @@ -47,13 +49,13 @@ export NO_EXT="${NO_EXT:-}" export COVERAGE="${COVERAGE:-}" export COMPRESSORS="${COMPRESSORS:-}" export MONGODB_API_VERSION="${MONGODB_API_VERSION:-}" -export SKIP_HATCH="${SKIP_HATCH:-}" export skip_crypt_shared="${skip_crypt_shared:-}" export STORAGE_ENGINE="${STORAGE_ENGINE:-}" export REQUIRE_API_VERSION="${REQUIRE_API_VERSION:-}" export skip_web_identity_auth_test="${skip_web_identity_auth_test:-}" export skip_ECS_auth_test="${skip_ECS_auth_test:-}" +export CARGO_HOME="$CARGO_HOME" export TMPDIR="$MONGO_ORCHESTRATION_HOME/db" export PATH="$MONGODB_BINARIES:$PATH" # shellcheck disable=SC2154 diff --git a/.evergreen/scripts/ensure-hatch.sh b/.evergreen/scripts/ensure-hatch.sh new file mode 100755 index 0000000000..a57b705127 --- /dev/null +++ b/.evergreen/scripts/ensure-hatch.sh @@ -0,0 +1,55 @@ +#!/bin/bash + +set -eu + +# Ensure hatch is available. +if [ ! -x "$(command -v hatch)" ]; then + # Install a virtual env with "hatch" + # Ensure there is a python venv. + . .evergreen/utils.sh + + if [ -z "${PYTHON_BINARY:-}" ]; then + PYTHON_BINARY=$(find_python3) + fi + VENV_DIR=.venv + if [ ! -d $VENV_DIR ]; then + echo "Creating virtual environment..." + createvirtualenv "$PYTHON_BINARY" .venv + echo "Creating virtual environment... done." + fi + if [ -f $VENV_DIR/Scripts/activate ]; then + . $VENV_DIR/Scripts/activate + else + . $VENV_DIR/bin/activate + fi + + python --version + + echo "Installing hatch..." + python -m pip install -U pip + python -m pip install hatch || { + # Install rust and try again. + CARGO_HOME=${CARGO_HOME:-${DRIVERS_TOOLS}/.cargo} + # Handle paths on Windows. + if [ "Windows_NT" = "${OS:-}" ]; then # Magic variable in cygwin + CARGO_HOME=$(cygpath -m $CARGO_HOME) + fi + export RUSTUP_HOME="${CARGO_HOME}/.rustup" + ${DRIVERS_TOOLS}/.evergreen/install-rust.sh + source "${CARGO_HOME}/env" + python -m pip install hatch + } + # Ensure hatch does not write to user or global locations. 
+ touch hatch_config.toml + HATCH_CONFIG=$(pwd)/hatch_config.toml + if [ "Windows_NT" = "${OS:-}" ]; then # Magic variable in cygwin + HATCH_CONFIG=$(cygpath -m "$HATCH_CONFIG") + fi + export HATCH_CONFIG + hatch config restore + hatch config set dirs.data "$(pwd)/.hatch/data" + hatch config set dirs.cache "$(pwd)/.hatch/cache" + + echo "Installing hatch... done." +fi +hatch --version diff --git a/.evergreen/scripts/generate_config.py b/.evergreen/scripts/generate_config.py index b7187b50db..c7f55fa946 100644 --- a/.evergreen/scripts/generate_config.py +++ b/.evergreen/scripts/generate_config.py @@ -744,7 +744,6 @@ def create_aws_auth_variants(): def create_alternative_hosts_variants(): - expansions = dict(SKIP_HATCH="true") batchtime = BATCHTIME_WEEK variants = [] @@ -752,11 +751,10 @@ def create_alternative_hosts_variants(): variants.append( create_variant( [".5.0 .standalone !.sync_async"], - get_display_name("OpenSSL 1.0.2", host, python=CPYTHONS[0], **expansions), + get_display_name("OpenSSL 1.0.2", host, python=CPYTHONS[0]), host=host, python=CPYTHONS[0], batchtime=batchtime, - expansions=expansions, ) ) @@ -765,8 +763,7 @@ def create_alternative_hosts_variants(): variants.append( create_variant( [".6.0 .standalone !.sync_async"], - display_name=get_display_name("Other hosts", host, **expansions), - expansions=expansions, + display_name=get_display_name("Other hosts", host), batchtime=batchtime, host=host, ) diff --git a/.evergreen/scripts/run-enterprise-auth-tests.sh b/.evergreen/scripts/run-enterprise-auth-tests.sh index 31371ead45..11f8db22e1 100755 --- a/.evergreen/scripts/run-enterprise-auth-tests.sh +++ b/.evergreen/scripts/run-enterprise-auth-tests.sh @@ -2,5 +2,6 @@ # Disable xtrace for security reasons (just in case it was accidentally set). set +x -bash "${DRIVERS_TOOLS}"/.evergreen/auth_aws/setup_secrets.sh drivers/enterprise_auth +# Use the default python to bootstrap secrets. 
+PYTHON_BINARY="" bash "${DRIVERS_TOOLS}"/.evergreen/auth_aws/setup_secrets.sh drivers/enterprise_auth TEST_ENTERPRISE_AUTH=1 AUTH=auth bash "${PROJECT_DIRECTORY}"/.evergreen/hatch.sh test:test-eg diff --git a/.evergreen/scripts/run-tests.sh b/.evergreen/scripts/run-tests.sh index 495db83e70..6986a0bbee 100755 --- a/.evergreen/scripts/run-tests.sh +++ b/.evergreen/scripts/run-tests.sh @@ -51,5 +51,4 @@ GREEN_FRAMEWORK=${GREEN_FRAMEWORK} \ TEST_DATA_LAKE=${TEST_DATA_LAKE:-} \ TEST_SUITES=${TEST_SUITES:-} \ MONGODB_API_VERSION=${MONGODB_API_VERSION} \ - SKIP_HATCH=${SKIP_HATCH} \ bash "${PROJECT_DIRECTORY}"/.evergreen/hatch.sh test:test-eg diff --git a/.gitignore b/.gitignore index 69dd20efa3..e4587125e8 100644 --- a/.gitignore +++ b/.gitignore @@ -23,6 +23,8 @@ secrets-export.sh libmongocrypt.tar.gz libmongocrypt/ libmongocrypt_git/ +hatch_config.toml +.venv # Lambda temp files test/lambda/.aws-sam From ff2f95987f945fed483dcf802082ac3a173eb905 Mon Sep 17 00:00:00 2001 From: Shane Harvey Date: Tue, 3 Dec 2024 16:16:47 -0800 Subject: [PATCH 123/182] PYTHON-2560 Retry KMS requests on transient errors (#2024) --- pymongo/asynchronous/encryption.py | 64 +++++++++++++++------ pymongo/synchronous/encryption.py | 64 +++++++++++++++------ test/asynchronous/test_encryption.py | 86 +++++++++++++++++++++++++++- test/test_encryption.py | 86 +++++++++++++++++++++++++++- 4 files changed, 260 insertions(+), 40 deletions(-) diff --git a/pymongo/asynchronous/encryption.py b/pymongo/asynchronous/encryption.py index 4802c3f54e..1cf165e6a2 100644 --- a/pymongo/asynchronous/encryption.py +++ b/pymongo/asynchronous/encryption.py @@ -19,6 +19,7 @@ import contextlib import enum import socket +import time as time # noqa: PLC0414 # needed in sync version import uuid import weakref from copy import deepcopy @@ -63,7 +64,11 @@ from pymongo.asynchronous.cursor import AsyncCursor from pymongo.asynchronous.database import AsyncDatabase from pymongo.asynchronous.mongo_client import AsyncMongoClient -from pymongo.asynchronous.pool import _configured_socket, _raise_connection_failure +from pymongo.asynchronous.pool import ( + _configured_socket, + _get_timeout_details, + _raise_connection_failure, +) from pymongo.common import CONNECT_TIMEOUT from pymongo.daemon import _spawn_daemon from pymongo.encryption_options import AutoEncryptionOpts, RangeOpts @@ -72,7 +77,7 @@ EncryptedCollectionError, EncryptionError, InvalidOperation, - PyMongoError, + NetworkTimeout, ServerSelectionTimeoutError, ) from pymongo.network_layer import BLOCKING_IO_ERRORS, async_sendall @@ -88,6 +93,9 @@ if TYPE_CHECKING: from pymongocrypt.mongocrypt import MongoCryptKmsContext + from pymongo.pyopenssl_context import _sslConn + from pymongo.typings import _Address + _IS_SYNC = False @@ -103,6 +111,13 @@ _KEY_VAULT_OPTS = CodecOptions(document_class=RawBSONDocument) +async def _connect_kms(address: _Address, opts: PoolOptions) -> Union[socket.socket, _sslConn]: + try: + return await _configured_socket(address, opts) + except Exception as exc: + _raise_connection_failure(address, exc, timeout_details=_get_timeout_details(opts)) + + @contextlib.contextmanager def _wrap_encryption_errors() -> Iterator[None]: """Context manager to wrap encryption related errors.""" @@ -166,8 +181,8 @@ async def kms_request(self, kms_context: MongoCryptKmsContext) -> None: None, # crlfile False, # allow_invalid_certificates False, # allow_invalid_hostnames - False, - ) # disable_ocsp_endpoint_check + False, # disable_ocsp_endpoint_check + ) # CSOT: set timeout for socket 
creation. connect_timeout = max(_csot.clamp_remaining(_KMS_CONNECT_TIMEOUT), 0.001) opts = PoolOptions( @@ -175,9 +190,13 @@ async def kms_request(self, kms_context: MongoCryptKmsContext) -> None: socket_timeout=connect_timeout, ssl_context=ctx, ) - host, port = parse_host(endpoint, _HTTPS_PORT) + address = parse_host(endpoint, _HTTPS_PORT) + sleep_u = kms_context.usleep + if sleep_u: + sleep_sec = float(sleep_u) / 1e6 + await asyncio.sleep(sleep_sec) try: - conn = await _configured_socket((host, port), opts) + conn = await _connect_kms(address, opts) try: await async_sendall(conn, message) while kms_context.bytes_needed > 0: @@ -194,20 +213,29 @@ async def kms_request(self, kms_context: MongoCryptKmsContext) -> None: if not data: raise OSError("KMS connection closed") kms_context.feed(data) - # Async raises an OSError instead of returning empty bytes - except OSError as err: - raise OSError("KMS connection closed") from err - except BLOCKING_IO_ERRORS: - raise socket.timeout("timed out") from None + except MongoCryptError: + raise # Propagate MongoCryptError errors directly. + except Exception as exc: + # Wrap I/O errors in PyMongo exceptions. + if isinstance(exc, BLOCKING_IO_ERRORS): + exc = socket.timeout("timed out") + _raise_connection_failure(address, exc, timeout_details=_get_timeout_details(opts)) finally: conn.close() - except (PyMongoError, MongoCryptError): - raise # Propagate pymongo errors directly. - except asyncio.CancelledError: - raise - except Exception as error: - # Wrap I/O errors in PyMongo exceptions. - _raise_connection_failure((host, port), error) + except MongoCryptError: + raise # Propagate MongoCryptError errors directly. + except Exception as exc: + remaining = _csot.remaining() + if isinstance(exc, NetworkTimeout) or (remaining is not None and remaining <= 0): + raise + # Mark this attempt as failed and defer to libmongocrypt to retry. + try: + kms_context.fail() + except MongoCryptError as final_err: + exc = MongoCryptError( + f"{final_err}, last attempt failed with: {exc}", final_err.code + ) + raise exc from final_err async def collection_info(self, database: str, filter: bytes) -> Optional[bytes]: """Get the collection info for a namespace. 
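The synchronous encryption.py diff below mirrors the asynchronous changes above. As a reading aid only, and not part of the patch itself, the retry contract those changes rely on can be sketched roughly as follows; run_one_attempt is a hypothetical coroutine standing in for the connect/send/feed sequence inside kms_request, while kms_context.usleep and kms_context.fail() are the pymongocrypt hooks already used in the diff above.

# Illustrative sketch only; in the driver, libmongocrypt re-invokes
# kms_request() for each new attempt rather than looping here, and
# kms_request() also re-raises immediately on CSOT expiry.
import asyncio


async def kms_attempt(kms_context, run_one_attempt):
    # Honor the backoff requested by libmongocrypt (usleep is in microseconds).
    if kms_context.usleep:
        await asyncio.sleep(kms_context.usleep / 1e6)
    try:
        await run_one_attempt(kms_context)
    except Exception:
        # Report the failed attempt. kms_context.fail() raises MongoCryptError
        # once retries are exhausted; otherwise the error is swallowed so the
        # libmongocrypt state machine can schedule another attempt, matching
        # the behavior added to kms_request() above.
        kms_context.fail()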
diff --git a/pymongo/synchronous/encryption.py b/pymongo/synchronous/encryption.py index 09d0c0f2fd..ef49855059 100644 --- a/pymongo/synchronous/encryption.py +++ b/pymongo/synchronous/encryption.py @@ -19,6 +19,7 @@ import contextlib import enum import socket +import time as time # noqa: PLC0414 # needed in sync version import uuid import weakref from copy import deepcopy @@ -67,7 +68,7 @@ EncryptedCollectionError, EncryptionError, InvalidOperation, - PyMongoError, + NetworkTimeout, ServerSelectionTimeoutError, ) from pymongo.network_layer import BLOCKING_IO_ERRORS, sendall @@ -80,7 +81,11 @@ from pymongo.synchronous.cursor import Cursor from pymongo.synchronous.database import Database from pymongo.synchronous.mongo_client import MongoClient -from pymongo.synchronous.pool import _configured_socket, _raise_connection_failure +from pymongo.synchronous.pool import ( + _configured_socket, + _get_timeout_details, + _raise_connection_failure, +) from pymongo.typings import _DocumentType, _DocumentTypeArg from pymongo.uri_parser import parse_host from pymongo.write_concern import WriteConcern @@ -88,6 +93,9 @@ if TYPE_CHECKING: from pymongocrypt.mongocrypt import MongoCryptKmsContext + from pymongo.pyopenssl_context import _sslConn + from pymongo.typings import _Address + _IS_SYNC = True @@ -103,6 +111,13 @@ _KEY_VAULT_OPTS = CodecOptions(document_class=RawBSONDocument) +def _connect_kms(address: _Address, opts: PoolOptions) -> Union[socket.socket, _sslConn]: + try: + return _configured_socket(address, opts) + except Exception as exc: + _raise_connection_failure(address, exc, timeout_details=_get_timeout_details(opts)) + + @contextlib.contextmanager def _wrap_encryption_errors() -> Iterator[None]: """Context manager to wrap encryption related errors.""" @@ -166,8 +181,8 @@ def kms_request(self, kms_context: MongoCryptKmsContext) -> None: None, # crlfile False, # allow_invalid_certificates False, # allow_invalid_hostnames - False, - ) # disable_ocsp_endpoint_check + False, # disable_ocsp_endpoint_check + ) # CSOT: set timeout for socket creation. connect_timeout = max(_csot.clamp_remaining(_KMS_CONNECT_TIMEOUT), 0.001) opts = PoolOptions( @@ -175,9 +190,13 @@ def kms_request(self, kms_context: MongoCryptKmsContext) -> None: socket_timeout=connect_timeout, ssl_context=ctx, ) - host, port = parse_host(endpoint, _HTTPS_PORT) + address = parse_host(endpoint, _HTTPS_PORT) + sleep_u = kms_context.usleep + if sleep_u: + sleep_sec = float(sleep_u) / 1e6 + time.sleep(sleep_sec) try: - conn = _configured_socket((host, port), opts) + conn = _connect_kms(address, opts) try: sendall(conn, message) while kms_context.bytes_needed > 0: @@ -194,20 +213,29 @@ def kms_request(self, kms_context: MongoCryptKmsContext) -> None: if not data: raise OSError("KMS connection closed") kms_context.feed(data) - # Async raises an OSError instead of returning empty bytes - except OSError as err: - raise OSError("KMS connection closed") from err - except BLOCKING_IO_ERRORS: - raise socket.timeout("timed out") from None + except MongoCryptError: + raise # Propagate MongoCryptError errors directly. + except Exception as exc: + # Wrap I/O errors in PyMongo exceptions. + if isinstance(exc, BLOCKING_IO_ERRORS): + exc = socket.timeout("timed out") + _raise_connection_failure(address, exc, timeout_details=_get_timeout_details(opts)) finally: conn.close() - except (PyMongoError, MongoCryptError): - raise # Propagate pymongo errors directly. 
- except asyncio.CancelledError: - raise - except Exception as error: - # Wrap I/O errors in PyMongo exceptions. - _raise_connection_failure((host, port), error) + except MongoCryptError: + raise # Propagate MongoCryptError errors directly. + except Exception as exc: + remaining = _csot.remaining() + if isinstance(exc, NetworkTimeout) or (remaining is not None and remaining <= 0): + raise + # Mark this attempt as failed and defer to libmongocrypt to retry. + try: + kms_context.fail() + except MongoCryptError as final_err: + exc = MongoCryptError( + f"{final_err}, last attempt failed with: {exc}", final_err.code + ) + raise exc from final_err def collection_info(self, database: str, filter: bytes) -> Optional[bytes]: """Get the collection info for a namespace. diff --git a/test/asynchronous/test_encryption.py b/test/asynchronous/test_encryption.py index 21cd5e2666..559b06ddf4 100644 --- a/test/asynchronous/test_encryption.py +++ b/test/asynchronous/test_encryption.py @@ -17,6 +17,8 @@ import base64 import copy +import http.client +import json import os import pathlib import re @@ -91,6 +93,7 @@ WriteError, ) from pymongo.operations import InsertOne, ReplaceOne, UpdateOne +from pymongo.ssl_support import get_ssl_context from pymongo.write_concern import WriteConcern _IS_SYNC = False @@ -1366,9 +1369,8 @@ async def test_04_aws_endpoint_invalid_port(self): "key": ("arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0"), "endpoint": "kms.us-east-1.amazonaws.com:12345", } - with self.assertRaisesRegex(EncryptionError, "kms.us-east-1.amazonaws.com:12345") as ctx: + with self.assertRaisesRegex(EncryptionError, "kms.us-east-1.amazonaws.com:12345"): await self.client_encryption.create_data_key("aws", master_key=master_key) - self.assertIsInstance(ctx.exception.cause, AutoReconnect) @unittest.skipUnless(any(AWS_CREDS.values()), "AWS environment credentials are not set") async def test_05_aws_endpoint_wrong_region(self): @@ -2853,6 +2855,86 @@ async def test_accepts_trim_factor_0(self): assert len(payload) > len(self.payload_defaults) +# https://github.com/mongodb/specifications/blob/master/source/client-side-encryption/tests/README.md#24-kms-retry-tests +class TestKmsRetryProse(AsyncEncryptionIntegrationTest): + @unittest.skipUnless(any(AWS_CREDS.values()), "AWS environment credentials are not set") + async def asyncSetUp(self): + await super().asyncSetUp() + # 1, create client with only tlsCAFile. + providers: dict = copy.deepcopy(ALL_KMS_PROVIDERS) + providers["azure"]["identityPlatformEndpoint"] = "127.0.0.1:9003" + providers["gcp"]["endpoint"] = "127.0.0.1:9003" + kms_tls_opts = { + p: {"tlsCAFile": CA_PEM, "tlsCertificateKeyFile": CLIENT_PEM} for p in providers + } + self.client_encryption = self.create_client_encryption( + providers, "keyvault.datakeys", self.client, OPTS, kms_tls_options=kms_tls_opts + ) + + async def http_post(self, path, data=None): + # Note, the connection to the mock server needs to be closed after + # each request because the server is single threaded. 
+ ctx: ssl.SSLContext = get_ssl_context( + CLIENT_PEM, # certfile + None, # passphrase + CA_PEM, # ca_certs + None, # crlfile + False, # allow_invalid_certificates + False, # allow_invalid_hostnames + False, # disable_ocsp_endpoint_check + ) + conn = http.client.HTTPSConnection("127.0.0.1:9003", context=ctx) + try: + if data is not None: + headers = {"Content-type": "application/json"} + body = json.dumps(data) + else: + headers = {} + body = None + conn.request("POST", path, body, headers) + res = conn.getresponse() + res.read() + finally: + conn.close() + + async def _test(self, provider, master_key): + await self.http_post("/reset") + # Case 1: createDataKey and encrypt with TCP retry + await self.http_post("/set_failpoint/network", {"count": 1}) + key_id = await self.client_encryption.create_data_key(provider, master_key=master_key) + await self.http_post("/set_failpoint/network", {"count": 1}) + await self.client_encryption.encrypt( + 123, Algorithm.AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic, key_id + ) + + # Case 2: createDataKey and encrypt with HTTP retry + await self.http_post("/set_failpoint/http", {"count": 1}) + key_id = await self.client_encryption.create_data_key(provider, master_key=master_key) + await self.http_post("/set_failpoint/http", {"count": 1}) + await self.client_encryption.encrypt( + 123, Algorithm.AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic, key_id + ) + + # Case 3: createDataKey fails after too many retries + await self.http_post("/set_failpoint/network", {"count": 4}) + with self.assertRaisesRegex(EncryptionError, "KMS request failed after"): + await self.client_encryption.create_data_key(provider, master_key=master_key) + + async def test_kms_retry(self): + await self._test("aws", {"region": "foo", "key": "bar", "endpoint": "127.0.0.1:9003"}) + await self._test("azure", {"keyVaultEndpoint": "127.0.0.1:9003", "keyName": "foo"}) + await self._test( + "gcp", + { + "projectId": "foo", + "location": "bar", + "keyRing": "baz", + "keyName": "qux", + "endpoint": "127.0.0.1:9003", + }, + ) + + # https://github.com/mongodb/specifications/blob/master/source/client-side-encryption/tests/README.md#automatic-data-encryption-keys class TestAutomaticDecryptionKeys(AsyncEncryptionIntegrationTest): @async_client_context.require_no_standalone diff --git a/test/test_encryption.py b/test/test_encryption.py index 18e21fe6a7..7a9929b7fd 100644 --- a/test/test_encryption.py +++ b/test/test_encryption.py @@ -17,6 +17,8 @@ import base64 import copy +import http.client +import json import os import pathlib import re @@ -88,6 +90,7 @@ WriteError, ) from pymongo.operations import InsertOne, ReplaceOne, UpdateOne +from pymongo.ssl_support import get_ssl_context from pymongo.synchronous import encryption from pymongo.synchronous.encryption import Algorithm, ClientEncryption, QueryType from pymongo.synchronous.mongo_client import MongoClient @@ -1360,9 +1363,8 @@ def test_04_aws_endpoint_invalid_port(self): "key": ("arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0"), "endpoint": "kms.us-east-1.amazonaws.com:12345", } - with self.assertRaisesRegex(EncryptionError, "kms.us-east-1.amazonaws.com:12345") as ctx: + with self.assertRaisesRegex(EncryptionError, "kms.us-east-1.amazonaws.com:12345"): self.client_encryption.create_data_key("aws", master_key=master_key) - self.assertIsInstance(ctx.exception.cause, AutoReconnect) @unittest.skipUnless(any(AWS_CREDS.values()), "AWS environment credentials are not set") def test_05_aws_endpoint_wrong_region(self): @@ -2835,6 
+2837,86 @@ def test_accepts_trim_factor_0(self): assert len(payload) > len(self.payload_defaults) +# https://github.com/mongodb/specifications/blob/master/source/client-side-encryption/tests/README.md#24-kms-retry-tests +class TestKmsRetryProse(EncryptionIntegrationTest): + @unittest.skipUnless(any(AWS_CREDS.values()), "AWS environment credentials are not set") + def setUp(self): + super().setUp() + # 1, create client with only tlsCAFile. + providers: dict = copy.deepcopy(ALL_KMS_PROVIDERS) + providers["azure"]["identityPlatformEndpoint"] = "127.0.0.1:9003" + providers["gcp"]["endpoint"] = "127.0.0.1:9003" + kms_tls_opts = { + p: {"tlsCAFile": CA_PEM, "tlsCertificateKeyFile": CLIENT_PEM} for p in providers + } + self.client_encryption = self.create_client_encryption( + providers, "keyvault.datakeys", self.client, OPTS, kms_tls_options=kms_tls_opts + ) + + def http_post(self, path, data=None): + # Note, the connection to the mock server needs to be closed after + # each request because the server is single threaded. + ctx: ssl.SSLContext = get_ssl_context( + CLIENT_PEM, # certfile + None, # passphrase + CA_PEM, # ca_certs + None, # crlfile + False, # allow_invalid_certificates + False, # allow_invalid_hostnames + False, # disable_ocsp_endpoint_check + ) + conn = http.client.HTTPSConnection("127.0.0.1:9003", context=ctx) + try: + if data is not None: + headers = {"Content-type": "application/json"} + body = json.dumps(data) + else: + headers = {} + body = None + conn.request("POST", path, body, headers) + res = conn.getresponse() + res.read() + finally: + conn.close() + + def _test(self, provider, master_key): + self.http_post("/reset") + # Case 1: createDataKey and encrypt with TCP retry + self.http_post("/set_failpoint/network", {"count": 1}) + key_id = self.client_encryption.create_data_key(provider, master_key=master_key) + self.http_post("/set_failpoint/network", {"count": 1}) + self.client_encryption.encrypt( + 123, Algorithm.AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic, key_id + ) + + # Case 2: createDataKey and encrypt with HTTP retry + self.http_post("/set_failpoint/http", {"count": 1}) + key_id = self.client_encryption.create_data_key(provider, master_key=master_key) + self.http_post("/set_failpoint/http", {"count": 1}) + self.client_encryption.encrypt( + 123, Algorithm.AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic, key_id + ) + + # Case 3: createDataKey fails after too many retries + self.http_post("/set_failpoint/network", {"count": 4}) + with self.assertRaisesRegex(EncryptionError, "KMS request failed after"): + self.client_encryption.create_data_key(provider, master_key=master_key) + + def test_kms_retry(self): + self._test("aws", {"region": "foo", "key": "bar", "endpoint": "127.0.0.1:9003"}) + self._test("azure", {"keyVaultEndpoint": "127.0.0.1:9003", "keyName": "foo"}) + self._test( + "gcp", + { + "projectId": "foo", + "location": "bar", + "keyRing": "baz", + "keyName": "qux", + "endpoint": "127.0.0.1:9003", + }, + ) + + # https://github.com/mongodb/specifications/blob/master/source/client-side-encryption/tests/README.md#automatic-data-encryption-keys class TestAutomaticDecryptionKeys(EncryptionIntegrationTest): @client_context.require_no_standalone From 89852ba7046a3020075e4008c5b3df25a3c0889f Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Wed, 4 Dec 2024 07:24:28 -0600 Subject: [PATCH 124/182] PYTHON-5001 Fix import time check (#2027) --- .evergreen/run-import-time-test.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git 
a/.evergreen/run-import-time-test.sh b/.evergreen/run-import-time-test.sh index 2b17f5ffeb..e9f6161bcc 100755 --- a/.evergreen/run-import-time-test.sh +++ b/.evergreen/run-import-time-test.sh @@ -25,9 +25,9 @@ function get_import_time() { } get_import_time $HEAD_SHA -git stash +git stash || true git checkout $BASE_SHA get_import_time $BASE_SHA git checkout $HEAD_SHA -git stash apply +git stash apply || true python tools/compare_import_time.py $HEAD_SHA $BASE_SHA From 11287e12570efeb86f4fb927d3170fa8e2cc59df Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Wed, 4 Dec 2024 09:18:13 -0600 Subject: [PATCH 125/182] PYTHON-5004 Fix handling of TEST_PATH (#2028) --- test/asynchronous/unified_format.py | 1 + test/unified_format.py | 1 + 2 files changed, 2 insertions(+) diff --git a/test/asynchronous/unified_format.py b/test/asynchronous/unified_format.py index b18b09383e..52d964eb3e 100644 --- a/test/asynchronous/unified_format.py +++ b/test/asynchronous/unified_format.py @@ -442,6 +442,7 @@ class UnifiedSpecTestMixinV1(AsyncIntegrationTest): RUN_ON_LOAD_BALANCER = True RUN_ON_SERVERLESS = True TEST_SPEC: Any + TEST_PATH = "" # This gets filled in by generate_test_classes mongos_clients: list[AsyncMongoClient] = [] @staticmethod diff --git a/test/unified_format.py b/test/unified_format.py index 5cb268a29d..372eb8abba 100644 --- a/test/unified_format.py +++ b/test/unified_format.py @@ -441,6 +441,7 @@ class UnifiedSpecTestMixinV1(IntegrationTest): RUN_ON_LOAD_BALANCER = True RUN_ON_SERVERLESS = True TEST_SPEC: Any + TEST_PATH = "" # This gets filled in by generate_test_classes mongos_clients: list[MongoClient] = [] @staticmethod From dc34833d97d1a97ac38fc03117a84ad5f87220c6 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Wed, 4 Dec 2024 09:26:29 -0600 Subject: [PATCH 126/182] PYTHON-5005 Skip more csot tests where applicable (#2029) --- test/asynchronous/test_cursor.py | 5 +++++ test/test_csot.py | 4 ++++ test/test_cursor.py | 5 +++++ 3 files changed, 14 insertions(+) diff --git a/test/asynchronous/test_cursor.py b/test/asynchronous/test_cursor.py index d216479451..d843ffb4aa 100644 --- a/test/asynchronous/test_cursor.py +++ b/test/asynchronous/test_cursor.py @@ -18,6 +18,7 @@ import copy import gc import itertools +import os import random import re import sys @@ -1412,6 +1413,8 @@ async def test_to_list_length(self): self.assertEqual(len(docs), 2) async def test_to_list_csot_applied(self): + if os.environ.get("SKIP_CSOT_TESTS", ""): + raise unittest.SkipTest("SKIP_CSOT_TESTS is set, skipping...") client = await self.async_single_client(timeoutMS=500, w=1) coll = client.pymongo.test # Initialize the client with a larger timeout to help make test less flakey @@ -1453,6 +1456,8 @@ async def test_command_cursor_to_list_length(self): @async_client_context.require_failCommand_blockConnection async def test_command_cursor_to_list_csot_applied(self): + if os.environ.get("SKIP_CSOT_TESTS", ""): + raise unittest.SkipTest("SKIP_CSOT_TESTS is set, skipping...") client = await self.async_single_client(timeoutMS=500, w=1) coll = client.pymongo.test # Initialize the client with a larger timeout to help make test less flakey diff --git a/test/test_csot.py b/test/test_csot.py index 64210b4d64..c075a07d5a 100644 --- a/test/test_csot.py +++ b/test/test_csot.py @@ -39,6 +39,8 @@ class TestCSOT(IntegrationTest): RUN_ON_LOAD_BALANCER = True def test_timeout_nested(self): + if os.environ.get("SKIP_CSOT_TESTS", ""): + raise unittest.SkipTest("SKIP_CSOT_TESTS is set, skipping...") coll = self.db.coll 
self.assertEqual(_csot.get_timeout(), None) self.assertEqual(_csot.get_deadline(), float("inf")) @@ -76,6 +78,8 @@ def test_timeout_nested(self): @client_context.require_change_streams def test_change_stream_can_resume_after_timeouts(self): + if os.environ.get("SKIP_CSOT_TESTS", ""): + raise unittest.SkipTest("SKIP_CSOT_TESTS is set, skipping...") coll = self.db.test coll.insert_one({}) with coll.watch() as stream: diff --git a/test/test_cursor.py b/test/test_cursor.py index bcc7ed75f1..84e431f8cb 100644 --- a/test/test_cursor.py +++ b/test/test_cursor.py @@ -18,6 +18,7 @@ import copy import gc import itertools +import os import random import re import sys @@ -1403,6 +1404,8 @@ def test_to_list_length(self): self.assertEqual(len(docs), 2) def test_to_list_csot_applied(self): + if os.environ.get("SKIP_CSOT_TESTS", ""): + raise unittest.SkipTest("SKIP_CSOT_TESTS is set, skipping...") client = self.single_client(timeoutMS=500, w=1) coll = client.pymongo.test # Initialize the client with a larger timeout to help make test less flakey @@ -1444,6 +1447,8 @@ def test_command_cursor_to_list_length(self): @client_context.require_failCommand_blockConnection def test_command_cursor_to_list_csot_applied(self): + if os.environ.get("SKIP_CSOT_TESTS", ""): + raise unittest.SkipTest("SKIP_CSOT_TESTS is set, skipping...") client = self.single_client(timeoutMS=500, w=1) coll = client.pymongo.test # Initialize the client with a larger timeout to help make test less flakey From 5204e87ca235ffbc34446dd4f8ba1c6b8d571e17 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Wed, 4 Dec 2024 09:35:06 -0600 Subject: [PATCH 127/182] PYTHON-5002 Add guard to synchro hook to accidental overwrite (#2026) --- .pre-commit-config.yaml | 1 + CONTRIBUTING.md | 12 ++++++++++++ tools/synchro.py | 15 +++++++++++++++ tools/synchro.sh | 6 ++++-- 4 files changed, 32 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4f6759bc5a..a0b06ab0dc 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -24,6 +24,7 @@ repos: entry: bash ./tools/synchro.sh language: python require_serial: true + fail_fast: true additional_dependencies: - ruff==0.1.3 - unasync diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 7516fbc9ed..814e040048 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -246,6 +246,18 @@ you are attempting to validate new spec tests in PyMongo. Follow the [Python Driver Release Process Wiki](https://wiki.corp.mongodb.com/display/DRIVERS/Python+Driver+Release+Process). +## Asyncio considerations + +PyMongo adds asyncio capability by modifying the source files in `*/asynchronous` to `*/synchronous` using +[unasync](https://github.com/python-trio/unasync/) and some custom transforms. + +Where possible, edit the code in `*/asynchronous/*.py` and not the synchronous files. +You can run `pre-commit run --all-files synchro` before running tests if you are testing synchronous code. + +To prevent the `synchro` hook from accidentally overwriting code, it first checks to see whether a sync version +of a file is changing and not its async counterpart, and will fail. +In the unlikely scenario that you want to override this behavior, first export `OVERRIDE_SYNCHRO_CHECK=1`. + ## Converting a test to async The `tools/convert_test_to_async.py` script takes in an existing synchronous test file and outputs a partially-converted asynchronous version of the same name to the `test/asynchronous` directory. 
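Not part of the patch: as a rough illustration of what the CONTRIBUTING.md text above means by "unasync and some custom transforms", the generated synchronous sources differ from their async counterparts mainly by dropped async/await keywords plus a table of name replacements. The real rules live in tools/synchro.py (modified next in this patch) and are far more extensive; the two replacement pairs below are examples drawn from names that appear elsewhere in this patch series.

# Hedged sketch of the transform idea only; the actual hook uses the unasync
# package plus the full `replacements` table passed to unasync_directory().
import re

EXAMPLE_REPLACEMENTS = {
    "AsyncMongoClient": "MongoClient",
    "async_client_context": "client_context",
}


def unasync_line(line: str) -> str:
    # Drop async/await keywords, then apply the name substitutions.
    line = re.sub(r"\basync def\b", "def", line)
    line = re.sub(r"\bawait\s+", "", line)
    for async_name, sync_name in EXAMPLE_REPLACEMENTS.items():
        line = line.replace(async_name, sync_name)
    return line

Running `pre-commit run --all-files synchro`, as described in the CONTRIBUTING.md text above, applies the full transform before synchronous tests are run.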
diff --git a/tools/synchro.py b/tools/synchro.py index 47617365f4..577e82d14e 100644 --- a/tools/synchro.py +++ b/tools/synchro.py @@ -19,7 +19,9 @@ from __future__ import annotations +import os import re +import sys from os import listdir from pathlib import Path @@ -356,6 +358,19 @@ def unasync_directory(files: list[str], src: str, dest: str, replacements: dict[ def main() -> None: + modified_files = [f"./{f}" for f in sys.argv[1:]] + errored = False + for fname in async_files + gridfs_files: + # If the async file was modified, we don't need to check if the sync file was also modified. + if str(fname) in modified_files: + continue + sync_name = str(fname).replace("asynchronous", "synchronous") + if sync_name in modified_files and "OVERRIDE_SYNCHRO_CHECK" not in os.environ: + print(f"Refusing to overwrite {sync_name}") + errored = True + if errored: + raise ValueError("Aborting synchro due to errors") + unasync_directory(async_files, _pymongo_base, _pymongo_dest_base, replacements) unasync_directory(gridfs_files, _gridfs_base, _gridfs_dest_base, replacements) unasync_directory(test_files, _test_base, _test_dest_base, replacements) diff --git a/tools/synchro.sh b/tools/synchro.sh index 2887509fe9..51c51a9548 100755 --- a/tools/synchro.sh +++ b/tools/synchro.sh @@ -1,5 +1,7 @@ -#!/bin/bash -eu +#!/bin/bash -python ./tools/synchro.py +set -eu + +python ./tools/synchro.py "$@" python -m ruff check pymongo/synchronous/ gridfs/synchronous/ test/ --fix --silent python -m ruff format pymongo/synchronous/ gridfs/synchronous/ test/ --silent From 1b89da4829945359ae99ecfb579104d2d28f7b27 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Wed, 4 Dec 2024 10:53:27 -0600 Subject: [PATCH 128/182] PYTHON-5006 Skip test_kms_retry when using PyOpenSSL (#2030) --- test/asynchronous/test_encryption.py | 9 +++++++++ test/test_encryption.py | 9 +++++++++ 2 files changed, 18 insertions(+) diff --git a/test/asynchronous/test_encryption.py b/test/asynchronous/test_encryption.py index 559b06ddf4..48f791ac16 100644 --- a/test/asynchronous/test_encryption.py +++ b/test/asynchronous/test_encryption.py @@ -42,6 +42,11 @@ from pymongo.asynchronous.helpers import anext from pymongo.daemon import _spawn_daemon +try: + from pymongo.pyopenssl_context import IS_PYOPENSSL +except ImportError: + IS_PYOPENSSL = False + sys.path[0:0] = [""] from test import ( @@ -2921,6 +2926,10 @@ async def _test(self, provider, master_key): await self.client_encryption.create_data_key(provider, master_key=master_key) async def test_kms_retry(self): + if IS_PYOPENSSL: + self.skipTest( + "PyOpenSSL does not support a required method for this test, Connection.makefile" + ) await self._test("aws", {"region": "foo", "key": "bar", "endpoint": "127.0.0.1:9003"}) await self._test("azure", {"keyVaultEndpoint": "127.0.0.1:9003", "keyName": "foo"}) await self._test( diff --git a/test/test_encryption.py b/test/test_encryption.py index 7a9929b7fd..daa5fd5d4c 100644 --- a/test/test_encryption.py +++ b/test/test_encryption.py @@ -42,6 +42,11 @@ from pymongo.synchronous.collection import Collection from pymongo.synchronous.helpers import next +try: + from pymongo.pyopenssl_context import IS_PYOPENSSL +except ImportError: + IS_PYOPENSSL = False + sys.path[0:0] = [""] from test import ( @@ -2903,6 +2908,10 @@ def _test(self, provider, master_key): self.client_encryption.create_data_key(provider, master_key=master_key) def test_kms_retry(self): + if IS_PYOPENSSL: + self.skipTest( + "PyOpenSSL does not support a required method for this test, 
Connection.makefile" + ) self._test("aws", {"region": "foo", "key": "bar", "endpoint": "127.0.0.1:9003"}) self._test("azure", {"keyVaultEndpoint": "127.0.0.1:9003", "keyName": "foo"}) self._test( From 30e4cceb24f9c34f4bfb4ba3d00293f98b33b1f6 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Fri, 6 Dec 2024 08:12:30 -0600 Subject: [PATCH 129/182] PYTHON-5008 Do not build c extensions on other hosts (#2031) --- .evergreen/generated_configs/variants.yml | 8 ++++++++ .evergreen/scripts/generate_config.py | 3 +++ 2 files changed, 11 insertions(+) diff --git a/.evergreen/generated_configs/variants.yml b/.evergreen/generated_configs/variants.yml index 226f4238f2..b17a500ade 100644 --- a/.evergreen/generated_configs/variants.yml +++ b/.evergreen/generated_configs/variants.yml @@ -16,6 +16,8 @@ buildvariants: run_on: - rhel92-fips batchtime: 10080 + expansions: + NO_EXT: "1" - name: other-hosts-rhel8-zseries tasks: - name: .6.0 .standalone !.sync_async @@ -23,6 +25,8 @@ buildvariants: run_on: - rhel8-zseries-small batchtime: 10080 + expansions: + NO_EXT: "1" - name: other-hosts-rhel8-power8 tasks: - name: .6.0 .standalone !.sync_async @@ -30,6 +34,8 @@ buildvariants: run_on: - rhel8-power-small batchtime: 10080 + expansions: + NO_EXT: "1" - name: other-hosts-rhel8-arm64 tasks: - name: .6.0 .standalone !.sync_async @@ -37,6 +43,8 @@ buildvariants: run_on: - rhel82-arm64-small batchtime: 10080 + expansions: + NO_EXT: "1" # Atlas connect tests - name: atlas-connect-rhel8-python3.9 diff --git a/.evergreen/scripts/generate_config.py b/.evergreen/scripts/generate_config.py index c7f55fa946..1637ae9711 100644 --- a/.evergreen/scripts/generate_config.py +++ b/.evergreen/scripts/generate_config.py @@ -758,6 +758,8 @@ def create_alternative_hosts_variants(): ) ) + expansions = dict() + handle_c_ext(C_EXTS[0], expansions) for host_name in OTHER_HOSTS: host = HOSTS[host_name] variants.append( @@ -766,6 +768,7 @@ def create_alternative_hosts_variants(): display_name=get_display_name("Other hosts", host), batchtime=batchtime, host=host, + expansions=expansions, ) ) return variants From d2fe1ed1542632b0fd8d49b6525a469afb2b6146 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 9 Dec 2024 15:07:34 -0600 Subject: [PATCH 130/182] Bump pyright from 1.1.389 to 1.1.390 (#2032) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/typing.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/typing.txt b/requirements/typing.txt index 613eba7645..404fe00748 100644 --- a/requirements/typing.txt +++ b/requirements/typing.txt @@ -1,5 +1,5 @@ mypy==1.13.0 -pyright==1.1.389 +pyright==1.1.390 typing_extensions -r ./encryption.txt -r ./ocsp.txt From 2d21035396f63437176965cc8f157505189f2f08 Mon Sep 17 00:00:00 2001 From: "Jeffrey A. Clark" Date: Fri, 20 Dec 2024 12:58:08 -0500 Subject: [PATCH 131/182] PYTHON-2187 Remove easy_install from documentation (#2033) --- doc/examples/authentication.rst | 2 +- doc/installation.rst | 10 ++-------- 2 files changed, 3 insertions(+), 9 deletions(-) diff --git a/doc/examples/authentication.rst b/doc/examples/authentication.rst index b319df814c..a92222bafc 100644 --- a/doc/examples/authentication.rst +++ b/doc/examples/authentication.rst @@ -139,7 +139,7 @@ Unix ~~~~ To authenticate using GSSAPI you must first install the python `kerberos`_ or -`pykerberos`_ module using easy_install or pip. 
Make sure you run kinit before +`pykerberos`_ module using pip. Make sure you run kinit before using the following authentication methods:: $ kinit mongodbuser@EXAMPLE.COM diff --git a/doc/installation.rst b/doc/installation.rst index dd8eb6ab42..f21a3792ad 100644 --- a/doc/installation.rst +++ b/doc/installation.rst @@ -6,7 +6,7 @@ Installing / Upgrading `_. .. warning:: **Do not install the "bson" package from pypi.** PyMongo comes - with its own bson package; doing "pip install bson" or "easy_install bson" + with its own bson package; doing "pip install bson" installs a third-party package that is incompatible with PyMongo. Installing with pip @@ -134,7 +134,7 @@ Python to fail to build the C extensions if you have Xcode 4 installed. There is a workaround:: # For some Python builds from python.org - $ env ARCHFLAGS='-arch i386 -arch x86_64' python -m easy_install pymongo + $ env ARCHFLAGS='-arch i386 -arch x86_64' python -m pip install pymongo See `http://bugs.python.org/issue11623 `_ for a more detailed explanation. @@ -152,15 +152,9 @@ This may cause C extension builds to fail with an error similar to:: There are workarounds:: # Apple specified workaround for Xcode 5.1 - # easy_install - $ ARCHFLAGS=-Wno-error=unused-command-line-argument-hard-error-in-future easy_install pymongo - # or pip $ ARCHFLAGS=-Wno-error=unused-command-line-argument-hard-error-in-future pip install pymongo # Alternative workaround using CFLAGS - # easy_install - $ CFLAGS=-Qunused-arguments easy_install pymongo - # or pip $ CFLAGS=-Qunused-arguments pip install pymongo From bdaf43c53df07a97ad9bb7f8f091b66bc7b82b60 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 30 Dec 2024 08:18:52 -0600 Subject: [PATCH 132/182] Bump pyright from 1.1.390 to 1.1.391 (#2035) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/typing.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/typing.txt b/requirements/typing.txt index 404fe00748..189f4f8719 100644 --- a/requirements/typing.txt +++ b/requirements/typing.txt @@ -1,5 +1,5 @@ mypy==1.13.0 -pyright==1.1.390 +pyright==1.1.391 typing_extensions -r ./encryption.txt -r ./ocsp.txt From b3ce9320f00b94047a0160f739e63b0a34427d8c Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Tue, 31 Dec 2024 13:26:45 -0600 Subject: [PATCH 133/182] PYTHON-5016 Create spawn host helper scripts (#2036) --- .evergreen/install-dependencies.sh | 5 ++--- .evergreen/scripts/configure-env.sh | 10 ++++++---- .evergreen/scripts/prepare-resources.sh | 9 +++++++-- .evergreen/scripts/setup-system.sh | 14 ++++++++++++++ .evergreen/setup-spawn-host.sh | 16 ++++++++++++++++ .evergreen/sync-spawn-host.sh | 13 +++++++++++++ 6 files changed, 58 insertions(+), 9 deletions(-) create mode 100755 .evergreen/scripts/setup-system.sh create mode 100755 .evergreen/setup-spawn-host.sh create mode 100755 .evergreen/sync-spawn-host.sh diff --git a/.evergreen/install-dependencies.sh b/.evergreen/install-dependencies.sh index 4c0541a4e2..d90ff4ab45 100755 --- a/.evergreen/install-dependencies.sh +++ b/.evergreen/install-dependencies.sh @@ -1,6 +1,5 @@ #!/bin/bash -set -o xtrace # Write all commands first to stderr -set -o errexit # Exit the script with error if any of the commands fail +set -eu # Copy PyMongo's test certificates over driver-evergreen-tools' cp ${PROJECT_DIRECTORY}/test/certificates/* ${DRIVERS_TOOLS}/.evergreen/x509gen/ @@ -9,7 +8,7 @@ 
cp ${PROJECT_DIRECTORY}/test/certificates/* ${DRIVERS_TOOLS}/.evergreen/x509gen/ cp ${PROJECT_DIRECTORY}/test/certificates/client.pem ${MONGO_ORCHESTRATION_HOME}/lib/client.pem # Ensure hatch is installed. -bash ${PROJECT_DIRECTORY}/scripts/ensure-hatch.sh +bash ${PROJECT_DIRECTORY}/.evergreen/scripts/ensure-hatch.sh if [ -w /etc/hosts ]; then SUDO="" diff --git a/.evergreen/scripts/configure-env.sh b/.evergreen/scripts/configure-env.sh index 313f4c3c92..e0c845a333 100755 --- a/.evergreen/scripts/configure-env.sh +++ b/.evergreen/scripts/configure-env.sh @@ -1,8 +1,10 @@ -#!/bin/bash -eux +#!/bin/bash + +set -eu # Get the current unique version of this checkout # shellcheck disable=SC2154 -if [ "$is_patch" = "true" ]; then +if [ "${is_patch:-}" = "true" ]; then # shellcheck disable=SC2154 CURRENT_VERSION="$(git describe)-patch-$version_id" else @@ -14,7 +16,7 @@ DRIVERS_TOOLS="$(dirname $PROJECT_DIRECTORY)/drivers-tools" CARGO_HOME=${CARGO_HOME:-${DRIVERS_TOOLS}/.cargo} # Python has cygwin path problems on Windows. Detect prospective mongo-orchestration home directory -if [ "Windows_NT" = "$OS" ]; then # Magic variable in cygwin +if [ "Windows_NT" = "${OS:-}" ]; then # Magic variable in cygwin DRIVERS_TOOLS=$(cygpath -m $DRIVERS_TOOLS) PROJECT_DIRECTORY=$(cygpath -m $PROJECT_DIRECTORY) CARGO_HOME=$(cygpath -m $CARGO_HOME) @@ -59,7 +61,7 @@ export CARGO_HOME="$CARGO_HOME" export TMPDIR="$MONGO_ORCHESTRATION_HOME/db" export PATH="$MONGODB_BINARIES:$PATH" # shellcheck disable=SC2154 -export PROJECT="$project" +export PROJECT="${project:-mongo-python-driver}" export PIP_QUIET=1 EOT diff --git a/.evergreen/scripts/prepare-resources.sh b/.evergreen/scripts/prepare-resources.sh index 33394b55ff..3cfa2c4efd 100755 --- a/.evergreen/scripts/prepare-resources.sh +++ b/.evergreen/scripts/prepare-resources.sh @@ -1,7 +1,10 @@ #!/bin/bash +set -eu + +HERE=$(dirname ${BASH_SOURCE:-$0}) +pushd $HERE +. env.sh -. src/.evergreen/scripts/env.sh -set -o xtrace rm -rf $DRIVERS_TOOLS if [ "$PROJECT" = "drivers-tools" ]; then # If this was a patch build, doing a fresh clone would not actually test the patch @@ -10,3 +13,5 @@ else git clone https://github.com/mongodb-labs/drivers-evergreen-tools.git $DRIVERS_TOOLS fi echo "{ \"releases\": { \"default\": \"$MONGODB_BINARIES\" }}" >$MONGO_ORCHESTRATION_HOME/orchestration.config + +popd diff --git a/.evergreen/scripts/setup-system.sh b/.evergreen/scripts/setup-system.sh new file mode 100755 index 0000000000..d78d924f6b --- /dev/null +++ b/.evergreen/scripts/setup-system.sh @@ -0,0 +1,14 @@ +#!/bin/bash + +set -eu + +HERE=$(dirname ${BASH_SOURCE:-$0}) +pushd "$(dirname "$(dirname $HERE)")" +echo "Setting up system..." +bash .evergreen/scripts/configure-env.sh +source .evergreen/scripts/env.sh +bash .evergreen/scripts/prepare-resources.sh +bash $DRIVERS_TOOLS/.evergreen/setup.sh +bash .evergreen/scripts/install-dependencies.sh +popd +echo "Setting up system... done." diff --git a/.evergreen/setup-spawn-host.sh b/.evergreen/setup-spawn-host.sh new file mode 100755 index 0000000000..4de2153d51 --- /dev/null +++ b/.evergreen/setup-spawn-host.sh @@ -0,0 +1,16 @@ +#!/bin/bash + +set -eu + +if [ -z "$1" ] + then + echo "Must supply a spawn host URL!" +fi + +target=$1 + +echo "Copying files to $target..." +rsync -az -e ssh --exclude '.git' --filter=':- .gitignore' -r . $target:/home/ec2-user/mongo-python-driver +echo "Copying files to $target... 
done" + +ssh $target /home/ec2-user/mongo-python-driver/.evergreen/scripts/setup-system.sh diff --git a/.evergreen/sync-spawn-host.sh b/.evergreen/sync-spawn-host.sh new file mode 100755 index 0000000000..4c3e276d41 --- /dev/null +++ b/.evergreen/sync-spawn-host.sh @@ -0,0 +1,13 @@ +#!/bin/bash + +if [ -z "$1" ] + then + echo "Must supply a spawn host URL!" +fi + +target=$1 + +echo "Syncing files to $target..." +# shellcheck disable=SC2034 +fswatch -o . | while read f; do rsync -hazv -e ssh --exclude '.git' --filter=':- .gitignore' -r . $target:/home/ec2-user/mongo-python-driver; done +echo "Syncing files to $target... done." From 8d27699e758145655cdb5bfc52b03fb81f3b02bb Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Tue, 31 Dec 2024 16:29:48 -0600 Subject: [PATCH 134/182] PYTHON-3096 Finish implementation and tests for GSSAPI options (#1985) --- .../scripts/run-enterprise-auth-tests.sh | 3 +- pymongo/asynchronous/auth.py | 14 ++-- pymongo/synchronous/auth.py | 14 ++-- test/asynchronous/test_auth.py | 66 +++++++++++++++++-- test/test_auth.py | 66 +++++++++++++++++-- 5 files changed, 144 insertions(+), 19 deletions(-) diff --git a/.evergreen/scripts/run-enterprise-auth-tests.sh b/.evergreen/scripts/run-enterprise-auth-tests.sh index 11f8db22e1..7f936b1955 100755 --- a/.evergreen/scripts/run-enterprise-auth-tests.sh +++ b/.evergreen/scripts/run-enterprise-auth-tests.sh @@ -1,7 +1,8 @@ #!/bin/bash +set -eu # Disable xtrace for security reasons (just in case it was accidentally set). set +x # Use the default python to bootstrap secrets. -PYTHON_BINARY="" bash "${DRIVERS_TOOLS}"/.evergreen/auth_aws/setup_secrets.sh drivers/enterprise_auth +bash "${DRIVERS_TOOLS}"/.evergreen/secrets_handling/setup-secrets.sh drivers/enterprise_auth TEST_ENTERPRISE_AUTH=1 AUTH=auth bash "${PROJECT_DIRECTORY}"/.evergreen/hatch.sh test:test-eg diff --git a/pymongo/asynchronous/auth.py b/pymongo/asynchronous/auth.py index fc563ec48f..48ce4bbd39 100644 --- a/pymongo/asynchronous/auth.py +++ b/pymongo/asynchronous/auth.py @@ -177,13 +177,20 @@ def _auth_key(nonce: str, username: str, password: str) -> str: return md5hash.hexdigest() -def _canonicalize_hostname(hostname: str) -> str: +def _canonicalize_hostname(hostname: str, option: str | bool) -> str: """Canonicalize hostname following MIT-krb5 behavior.""" # https://github.com/krb5/krb5/blob/d406afa363554097ac48646a29249c04f498c88e/src/util/k5test.py#L505-L520 + if option in [False, "none"]: + return hostname + af, socktype, proto, canonname, sockaddr = socket.getaddrinfo( hostname, None, 0, 0, socket.IPPROTO_TCP, socket.AI_CANONNAME )[0] + # For forward just to resolve the cname as dns.lookup() will not return it. + if option == "forward": + return canonname.lower() + try: name = socket.getnameinfo(sockaddr, socket.NI_NAMEREQD) except socket.gaierror: @@ -205,9 +212,8 @@ async def _authenticate_gssapi(credentials: MongoCredential, conn: AsyncConnecti props = credentials.mechanism_properties # Starting here and continuing through the while loop below - establish # the security context. See RFC 4752, Section 3.1, first paragraph. 
-        host = conn.address[0]
-        if props.canonicalize_host_name:
-            host = _canonicalize_hostname(host)
+        host = props.service_host or conn.address[0]
+        host = _canonicalize_hostname(host, props.canonicalize_host_name)
         service = props.service_name + "@" + host
         if props.service_realm is not None:
             service = service + "@" + props.service_realm
diff --git a/pymongo/synchronous/auth.py b/pymongo/synchronous/auth.py
index 7b370843c5..0e51ff8b7f 100644
--- a/pymongo/synchronous/auth.py
+++ b/pymongo/synchronous/auth.py
@@ -174,13 +174,20 @@ def _auth_key(nonce: str, username: str, password: str) -> str:
     return md5hash.hexdigest()
 
 
-def _canonicalize_hostname(hostname: str) -> str:
+def _canonicalize_hostname(hostname: str, option: str | bool) -> str:
     """Canonicalize hostname following MIT-krb5 behavior."""
     # https://github.com/krb5/krb5/blob/d406afa363554097ac48646a29249c04f498c88e/src/util/k5test.py#L505-L520
+    if option in [False, "none"]:
+        return hostname
+
     af, socktype, proto, canonname, sockaddr = socket.getaddrinfo(
         hostname, None, 0, 0, socket.IPPROTO_TCP, socket.AI_CANONNAME
     )[0]
 
+    # For forward just to resolve the cname as dns.lookup() will not return it.
+    if option == "forward":
+        return canonname.lower()
+
     try:
         name = socket.getnameinfo(sockaddr, socket.NI_NAMEREQD)
     except socket.gaierror:
@@ -202,9 +209,8 @@ def _authenticate_gssapi(credentials: MongoCredential, conn: Connection) -> None
         props = credentials.mechanism_properties
         # Starting here and continuing through the while loop below - establish
         # the security context. See RFC 4752, Section 3.1, first paragraph.
-        host = conn.address[0]
-        if props.canonicalize_host_name:
-            host = _canonicalize_hostname(host)
+        host = props.service_host or conn.address[0]
+        host = _canonicalize_hostname(host, props.canonicalize_host_name)
         service = props.service_name + "@" + host
         if props.service_realm is not None:
             service = service + "@" + props.service_realm
diff --git a/test/asynchronous/test_auth.py b/test/asynchronous/test_auth.py
index 4f26200fb0..08dc4d7247 100644
--- a/test/asynchronous/test_auth.py
+++ b/test/asynchronous/test_auth.py
@@ -35,7 +35,7 @@
 import pytest
 
 from pymongo import AsyncMongoClient, monitoring
-from pymongo.asynchronous.auth import HAVE_KERBEROS
+from pymongo.asynchronous.auth import HAVE_KERBEROS, _canonicalize_hostname
 from pymongo.auth_shared import _build_credentials_tuple
 from pymongo.errors import OperationFailure
 from pymongo.hello import HelloCompat
@@ -96,10 +96,11 @@ def setUpClass(cls):
         cls.service_realm_required = (
             GSSAPI_SERVICE_REALM is not None and GSSAPI_SERVICE_REALM not in GSSAPI_PRINCIPAL
         )
-        mech_properties = f"SERVICE_NAME:{GSSAPI_SERVICE_NAME}"
-        mech_properties += f",CANONICALIZE_HOST_NAME:{GSSAPI_CANONICALIZE}"
+        mech_properties = dict(
+            SERVICE_NAME=GSSAPI_SERVICE_NAME, CANONICALIZE_HOST_NAME=GSSAPI_CANONICALIZE
+        )
         if GSSAPI_SERVICE_REALM is not None:
-            mech_properties += f",SERVICE_REALM:{GSSAPI_SERVICE_REALM}"
+            mech_properties["SERVICE_REALM"] = GSSAPI_SERVICE_REALM
         cls.mech_properties = mech_properties
 
     async def test_credentials_hashing(self):
@@ -167,7 +168,10 @@ async def test_gssapi_simple(self):
         await client[GSSAPI_DB].collection.find_one()
 
         # Log in using URI, with authMechanismProperties.
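The `_canonicalize_hostname` change above maps the three CANONICALIZE_HOST_NAME values
("none", "forward", "forwardAndReverse") onto plain resolver calls. A rough standalone
sketch of that mapping using only the standard library (the function name and fallback
behavior here are illustrative, not the driver's implementation)::

    import socket

    def canonicalize(hostname: str, mode: str = "none") -> str:
        if mode == "none":
            return hostname
        # Forward resolution: ask the resolver for the canonical (CNAME) name.
        _, _, _, canonname, sockaddr = socket.getaddrinfo(
            hostname, None, 0, 0, socket.IPPROTO_TCP, socket.AI_CANONNAME
        )[0]
        if mode == "forward":
            return canonname.lower()
        # "forwardAndReverse": follow up with a reverse (PTR) lookup and fall
        # back to the forward result if the reverse lookup fails.
        try:
            name, _ = socket.getnameinfo(sockaddr, socket.NI_NAMEREQD)
        except socket.gaierror:
            return canonname.lower()
        return name.lower()

    # canonicalize("my-alias.example.com", "forwardAndReverse") would return the
    # host's PTR name when one exists (running this requires DNS access).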
- mech_uri = uri + f"&authMechanismProperties={self.mech_properties}" + mech_properties_str = "" + for key, value in self.mech_properties.items(): + mech_properties_str += f"{key}:{value}," + mech_uri = uri + f"&authMechanismProperties={mech_properties_str[:-1]}" client = self.simple_client(mech_uri) await client[GSSAPI_DB].collection.find_one() @@ -268,6 +272,58 @@ async def test_gssapi_threaded(self): thread.join() self.assertTrue(thread.success) + async def test_gssapi_canonicalize_host_name(self): + # Test the low level method. + assert GSSAPI_HOST is not None + result = _canonicalize_hostname(GSSAPI_HOST, "forward") + if "compute-1.amazonaws.com" not in result: + self.assertEqual(result, GSSAPI_HOST) + result = _canonicalize_hostname(GSSAPI_HOST, "forwardAndReverse") + self.assertEqual(result, GSSAPI_HOST) + + # Use the equivalent named CANONICALIZE_HOST_NAME. + props = self.mech_properties.copy() + if props["CANONICALIZE_HOST_NAME"] == "true": + props["CANONICALIZE_HOST_NAME"] = "forwardAndReverse" + else: + props["CANONICALIZE_HOST_NAME"] = "none" + client = self.simple_client( + GSSAPI_HOST, + GSSAPI_PORT, + username=GSSAPI_PRINCIPAL, + password=GSSAPI_PASS, + authMechanism="GSSAPI", + authMechanismProperties=props, + ) + await client.server_info() + + async def test_gssapi_host_name(self): + props = self.mech_properties + props["SERVICE_HOST"] = "example.com" + + # Authenticate with authMechanismProperties. + client = self.simple_client( + GSSAPI_HOST, + GSSAPI_PORT, + username=GSSAPI_PRINCIPAL, + password=GSSAPI_PASS, + authMechanism="GSSAPI", + authMechanismProperties=self.mech_properties, + ) + with self.assertRaises(OperationFailure): + await client.server_info() + + props["SERVICE_HOST"] = GSSAPI_HOST + client = self.simple_client( + GSSAPI_HOST, + GSSAPI_PORT, + username=GSSAPI_PRINCIPAL, + password=GSSAPI_PASS, + authMechanism="GSSAPI", + authMechanismProperties=self.mech_properties, + ) + await client.server_info() + class TestSASLPlain(AsyncPyMongoTestCase): @classmethod diff --git a/test/test_auth.py b/test/test_auth.py index 70c061b747..345d16121b 100644 --- a/test/test_auth.py +++ b/test/test_auth.py @@ -40,7 +40,7 @@ from pymongo.hello import HelloCompat from pymongo.read_preferences import ReadPreference from pymongo.saslprep import HAVE_STRINGPREP -from pymongo.synchronous.auth import HAVE_KERBEROS +from pymongo.synchronous.auth import HAVE_KERBEROS, _canonicalize_hostname _IS_SYNC = True @@ -96,10 +96,11 @@ def setUpClass(cls): cls.service_realm_required = ( GSSAPI_SERVICE_REALM is not None and GSSAPI_SERVICE_REALM not in GSSAPI_PRINCIPAL ) - mech_properties = f"SERVICE_NAME:{GSSAPI_SERVICE_NAME}" - mech_properties += f",CANONICALIZE_HOST_NAME:{GSSAPI_CANONICALIZE}" + mech_properties = dict( + SERVICE_NAME=GSSAPI_SERVICE_NAME, CANONICALIZE_HOST_NAME=GSSAPI_CANONICALIZE + ) if GSSAPI_SERVICE_REALM is not None: - mech_properties += f",SERVICE_REALM:{GSSAPI_SERVICE_REALM}" + mech_properties["SERVICE_REALM"] = GSSAPI_SERVICE_REALM cls.mech_properties = mech_properties def test_credentials_hashing(self): @@ -167,7 +168,10 @@ def test_gssapi_simple(self): client[GSSAPI_DB].collection.find_one() # Log in using URI, with authMechanismProperties. 
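Both tests above build the authMechanismProperties URI option by hand from the
properties dict. The option is a comma-separated list of KEY:VALUE pairs; a small
sketch of the same construction (the helper name, host, and principal below are
placeholders, not values from the test suite)::

    def mech_properties_uri(props: dict[str, str]) -> str:
        # e.g. "SERVICE_NAME:mongodb,CANONICALIZE_HOST_NAME:forward"
        return ",".join(f"{key}:{value}" for key, value in props.items())

    props = {"SERVICE_NAME": "mongodb", "CANONICALIZE_HOST_NAME": "forward"}
    uri = (
        "mongodb://user%40EXAMPLE.COM@kerberos.example.com/?authMechanism=GSSAPI"
        f"&authMechanismProperties={mech_properties_uri(props)}"
    )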
- mech_uri = uri + f"&authMechanismProperties={self.mech_properties}" + mech_properties_str = "" + for key, value in self.mech_properties.items(): + mech_properties_str += f"{key}:{value}," + mech_uri = uri + f"&authMechanismProperties={mech_properties_str[:-1]}" client = self.simple_client(mech_uri) client[GSSAPI_DB].collection.find_one() @@ -268,6 +272,58 @@ def test_gssapi_threaded(self): thread.join() self.assertTrue(thread.success) + def test_gssapi_canonicalize_host_name(self): + # Test the low level method. + assert GSSAPI_HOST is not None + result = _canonicalize_hostname(GSSAPI_HOST, "forward") + if "compute-1.amazonaws.com" not in result: + self.assertEqual(result, GSSAPI_HOST) + result = _canonicalize_hostname(GSSAPI_HOST, "forwardAndReverse") + self.assertEqual(result, GSSAPI_HOST) + + # Use the equivalent named CANONICALIZE_HOST_NAME. + props = self.mech_properties.copy() + if props["CANONICALIZE_HOST_NAME"] == "true": + props["CANONICALIZE_HOST_NAME"] = "forwardAndReverse" + else: + props["CANONICALIZE_HOST_NAME"] = "none" + client = self.simple_client( + GSSAPI_HOST, + GSSAPI_PORT, + username=GSSAPI_PRINCIPAL, + password=GSSAPI_PASS, + authMechanism="GSSAPI", + authMechanismProperties=props, + ) + client.server_info() + + def test_gssapi_host_name(self): + props = self.mech_properties + props["SERVICE_HOST"] = "example.com" + + # Authenticate with authMechanismProperties. + client = self.simple_client( + GSSAPI_HOST, + GSSAPI_PORT, + username=GSSAPI_PRINCIPAL, + password=GSSAPI_PASS, + authMechanism="GSSAPI", + authMechanismProperties=self.mech_properties, + ) + with self.assertRaises(OperationFailure): + client.server_info() + + props["SERVICE_HOST"] = GSSAPI_HOST + client = self.simple_client( + GSSAPI_HOST, + GSSAPI_PORT, + username=GSSAPI_PRINCIPAL, + password=GSSAPI_PASS, + authMechanism="GSSAPI", + authMechanismProperties=self.mech_properties, + ) + client.server_info() + class TestSASLPlain(PyMongoTestCase): @classmethod From 71ef4e0c35417aeaa1dd794711ec11ec0b2f76e0 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Thu, 2 Jan 2025 10:12:53 -0600 Subject: [PATCH 135/182] PYTHON-5018 Use a single script for system setup (#2041) --- .evergreen/config.yml | 28 +--------------------------- 1 file changed, 1 insertion(+), 27 deletions(-) diff --git a/.evergreen/config.yml b/.evergreen/config.yml index 5c0e2983ea..aa0eee3620 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -46,24 +46,11 @@ functions: binary: bash working_dir: "src" args: - - .evergreen/scripts/configure-env.sh + - .evergreen/scripts/setup-system.sh # Load the expansion file to make an evergreen variable with the current unique version - command: expansions.update params: file: src/expansion.yml - - command: subprocess.exec - params: - include_expansions_in_env: ["PROJECT_DIRECTORY", "DRIVERS_TOOLS"] - binary: bash - args: - - src/.evergreen/scripts/prepare-resources.sh - # Run drivers-evergreen-tools system setup - - command: subprocess.exec - params: - include_expansions_in_env: ["PROJECT_DIRECTORY", "DRIVERS_TOOLS"] - binary: bash - args: - - ${DRIVERS_TOOLS}/.evergreen/setup.sh "upload coverage" : - command: ec2.assume_role @@ -546,15 +533,6 @@ functions: args: - ${DRIVERS_TOOLS}/.evergreen/teardown.sh - "install dependencies": - - command: subprocess.exec - params: - binary: bash - working_dir: "src" - args: - - .evergreen/scripts/run-with-env.sh - - .evergreen/scripts/install-dependencies.sh - "assume ec2 role": - command: ec2.assume_role params: @@ -650,7 +628,6 @@ functions: pre: 
- func: "fetch source" - func: "setup system" - - func: "install dependencies" - func: "assume ec2 role" post: @@ -958,7 +935,6 @@ tasks: - name: "test-aws-lambda-deployed" commands: - - func: "install dependencies" - command: ec2.assume_role params: role_arn: ${LAMBDA_AWS_ROLE_ARN} @@ -1547,8 +1523,6 @@ tasks: - name: testazurekms-fail-task commands: - - func: fetch source - - func: setup system - func: "bootstrap mongo-orchestration" vars: VERSION: "latest" From d2d8f6e29be2d6d2a23f61d2c1af6164b0c9e958 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Thu, 2 Jan 2025 10:54:04 -0600 Subject: [PATCH 136/182] PYTHON-5019 Fix mod_wsgi tests (#2039) --- .evergreen/install-dependencies.sh | 3 --- .evergreen/scripts/ensure-hatch.sh | 4 ++++ .evergreen/setup-spawn-host.sh | 6 ++++-- 3 files changed, 8 insertions(+), 5 deletions(-) diff --git a/.evergreen/install-dependencies.sh b/.evergreen/install-dependencies.sh index d90ff4ab45..e6dceb33fc 100755 --- a/.evergreen/install-dependencies.sh +++ b/.evergreen/install-dependencies.sh @@ -7,9 +7,6 @@ cp ${PROJECT_DIRECTORY}/test/certificates/* ${DRIVERS_TOOLS}/.evergreen/x509gen/ # Replace MongoOrchestration's client certificate. cp ${PROJECT_DIRECTORY}/test/certificates/client.pem ${MONGO_ORCHESTRATION_HOME}/lib/client.pem -# Ensure hatch is installed. -bash ${PROJECT_DIRECTORY}/.evergreen/scripts/ensure-hatch.sh - if [ -w /etc/hosts ]; then SUDO="" else diff --git a/.evergreen/scripts/ensure-hatch.sh b/.evergreen/scripts/ensure-hatch.sh index a57b705127..e63d98bb6d 100755 --- a/.evergreen/scripts/ensure-hatch.sh +++ b/.evergreen/scripts/ensure-hatch.sh @@ -2,6 +2,9 @@ set -eu +HERE=$(dirname ${BASH_SOURCE:-$0}) +pushd "$(dirname "$(dirname $HERE)")" > /dev/null + # Ensure hatch is available. if [ ! -x "$(command -v hatch)" ]; then # Install a virtual env with "hatch" @@ -53,3 +56,4 @@ if [ ! -x "$(command -v hatch)" ]; then echo "Installing hatch... done." fi hatch --version +popd > /dev/null diff --git a/.evergreen/setup-spawn-host.sh b/.evergreen/setup-spawn-host.sh index 4de2153d51..1a526c762c 100755 --- a/.evergreen/setup-spawn-host.sh +++ b/.evergreen/setup-spawn-host.sh @@ -8,9 +8,11 @@ if [ -z "$1" ] fi target=$1 +remote_dir=/home/ec2-user/mongo-python-driver echo "Copying files to $target..." -rsync -az -e ssh --exclude '.git' --filter=':- .gitignore' -r . $target:/home/ec2-user/mongo-python-driver +rsync -az -e ssh --exclude '.git' --filter=':- .gitignore' -r . $target:$remote_dir echo "Copying files to $target... done" -ssh $target /home/ec2-user/mongo-python-driver/.evergreen/scripts/setup-system.sh +ssh $target $remote_dir/.evergreen/scripts/setup-system.sh +ssh $target "PYTHON_BINARY=${PYTHON_BINARY:-} $remote_dir/.evergreen/scripts/ensure-hatch.sh" From 1b3f04c59996379dbaca8ace17e1b49e88432ce3 Mon Sep 17 00:00:00 2001 From: Shane Harvey Date: Thu, 2 Jan 2025 10:26:33 -0800 Subject: [PATCH 137/182] PYTHON-5020 Fix behavior of network timeouts on pyopenssl connections (#2037) --- pymongo/pyopenssl_context.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pymongo/pyopenssl_context.py b/pymongo/pyopenssl_context.py index 50d8680a74..a320e94929 100644 --- a/pymongo/pyopenssl_context.py +++ b/pymongo/pyopenssl_context.py @@ -125,7 +125,8 @@ def _call(self, call: Callable[..., _T], *args: Any, **kwargs: Any) -> _T: try: return call(*args, **kwargs) except BLOCKING_IO_ERRORS as exc: - if is_async: + # Do not retry if the connection is in non-blocking mode. 
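The pyOpenSSL change below stops retrying "would block" errors when the wrapped socket
was deliberately put into non-blocking mode (a timeout of 0). A standalone sketch of
that decision, with illustrative names and only standard-library pieces; it is not the
driver's actual wrapper::

    import select
    import socket
    import ssl
    from typing import Any, Callable

    # Errors that mean "try again later" rather than "the connection failed".
    WOULD_BLOCK = (BlockingIOError, ssl.SSLWantReadError, ssl.SSLWantWriteError)

    def call_with_retry(sock: socket.socket, func: Callable[..., Any], *args: Any) -> Any:
        while True:
            try:
                return func(*args)
            except WOULD_BLOCK:
                # A timeout of 0 means the caller chose non-blocking mode on
                # purpose, so propagate instead of retrying forever.
                if sock.gettimeout() == 0:
                    raise
                # Otherwise wait briefly for readiness and try again.
                select.select([sock], [sock], [], 0.5)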
+ if is_async or timeout == 0: raise exc # Check for closed socket. if self.fileno() == -1: From 163514bce1c71c4f0d785575d9da5a8b4dea9141 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Thu, 2 Jan 2025 12:29:20 -0600 Subject: [PATCH 138/182] PYTHON-5016 Fix initial sync in spawn host script (#2038) --- .evergreen/sync-spawn-host.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/.evergreen/sync-spawn-host.sh b/.evergreen/sync-spawn-host.sh index 4c3e276d41..3d6c968901 100755 --- a/.evergreen/sync-spawn-host.sh +++ b/.evergreen/sync-spawn-host.sh @@ -8,6 +8,7 @@ fi target=$1 echo "Syncing files to $target..." +rsync -haz -e ssh --exclude '.git' --filter=':- .gitignore' -r . $target:/home/ec2-user/mongo-python-driver # shellcheck disable=SC2034 fswatch -o . | while read f; do rsync -hazv -e ssh --exclude '.git' --filter=':- .gitignore' -r . $target:/home/ec2-user/mongo-python-driver; done echo "Syncing files to $target... done." From bf415371bbe7eef157ab16662c5edf8fb42d0431 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Fri, 3 Jan 2025 06:19:18 -0600 Subject: [PATCH 139/182] PYTHON-4754 Add gevent test for Python 3.13 (#2044) --- .evergreen/generated_configs/variants.yml | 12 ++++++------ .evergreen/scripts/generate_config.py | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.evergreen/generated_configs/variants.yml b/.evergreen/generated_configs/variants.yml index b17a500ade..b77859bc91 100644 --- a/.evergreen/generated_configs/variants.yml +++ b/.evergreen/generated_configs/variants.yml @@ -650,28 +650,28 @@ buildvariants: AUTH: auth SSL: ssl PYTHON_BINARY: /opt/python/3.9/bin/python3 - - name: green-eventlet-rhel8-python3.12 + - name: green-eventlet-rhel8-python3.13 tasks: - name: .standalone .noauth .nossl .sync_async - display_name: Green Eventlet RHEL8 Python3.12 + display_name: Green Eventlet RHEL8 Python3.13 run_on: - rhel87-small expansions: GREEN_FRAMEWORK: eventlet AUTH: auth SSL: ssl - PYTHON_BINARY: /opt/python/3.12/bin/python3 - - name: green-gevent-rhel8-python3.12 + PYTHON_BINARY: /opt/python/3.13/bin/python3 + - name: green-gevent-rhel8-python3.13 tasks: - name: .standalone .noauth .nossl .sync_async - display_name: Green Gevent RHEL8 Python3.12 + display_name: Green Gevent RHEL8 Python3.13 run_on: - rhel87-small expansions: GREEN_FRAMEWORK: gevent AUTH: auth SSL: ssl - PYTHON_BINARY: /opt/python/3.12/bin/python3 + PYTHON_BINARY: /opt/python/3.13/bin/python3 # Load balancer tests - name: load-balancer-rhel8-v6.0-python3.9 diff --git a/.evergreen/scripts/generate_config.py b/.evergreen/scripts/generate_config.py index 1637ae9711..8a9ba8a206 100644 --- a/.evergreen/scripts/generate_config.py +++ b/.evergreen/scripts/generate_config.py @@ -554,7 +554,7 @@ def create_green_framework_variants(): variants = [] tasks = [".standalone .noauth .nossl .sync_async"] host = DEFAULT_HOST - for python, framework in product([CPYTHONS[0], CPYTHONS[-2]], ["eventlet", "gevent"]): + for python, framework in product([CPYTHONS[0], CPYTHONS[-1]], ["eventlet", "gevent"]): expansions = dict(GREEN_FRAMEWORK=framework, AUTH="auth", SSL="ssl") display_name = get_display_name(f"Green {framework.capitalize()}", host, python=python) variant = create_variant( From 27039c30bf6bd3e8167d084597cde28eb82a526f Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Fri, 3 Jan 2025 14:15:49 -0600 Subject: [PATCH 140/182] PYTHON-5003 Update pymongo's pymongocrypt version to >=1.12 (#2043) --- doc/changelog.rst | 1 + requirements/encryption.txt | 2 +- 2 files changed, 2 
insertions(+), 1 deletion(-) diff --git a/doc/changelog.rst b/doc/changelog.rst index d9e6cc3f5b..d185e3c728 100644 --- a/doc/changelog.rst +++ b/doc/changelog.rst @@ -13,6 +13,7 @@ PyMongo 4.11 brings a number of changes including: - Dropped support for Python 3.8. - Dropped support for MongoDB 3.6. - Dropped support for the MONGODB-CR authenticate mechanism, which is no longer supported by MongoDB 4.0+. +- pymongocrypt>=1.12 is now required for :ref:`In-Use Encryption` support. - Added support for free-threaded Python with the GIL disabled. For more information see: `Free-threaded CPython `_. - :attr:`~pymongo.asynchronous.mongo_client.AsyncMongoClient.address` and diff --git a/requirements/encryption.txt b/requirements/encryption.txt index 1a8c14844c..5962f5028f 100644 --- a/requirements/encryption.txt +++ b/requirements/encryption.txt @@ -1,3 +1,3 @@ pymongo-auth-aws>=1.1.0,<2.0.0 -pymongocrypt>=1.10.0,<2.0.0 +pymongocrypt>=1.12.0,<2.0.0 certifi;os.name=='nt' or sys_platform=='darwin' From a1e681142b630f2cac295440b53b2b774ba0e5dc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 6 Jan 2025 06:57:50 -0600 Subject: [PATCH 141/182] Bump supercharge/mongodb-github-action from 1.11.0 to 1.12.0 in the actions group (#2046) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/test-python.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/test-python.yml b/.github/workflows/test-python.yml index 12cfaa4b27..2310b7698d 100644 --- a/.github/workflows/test-python.yml +++ b/.github/workflows/test-python.yml @@ -87,7 +87,7 @@ jobs: pip install hatch fi - name: Start MongoDB - uses: supercharge/mongodb-github-action@1.11.0 + uses: supercharge/mongodb-github-action@1.12.0 with: mongodb-version: 6.0 - name: Run tests @@ -115,7 +115,7 @@ jobs: run: | pip install -U hatch pip - name: Start MongoDB - uses: supercharge/mongodb-github-action@1.11.0 + uses: supercharge/mongodb-github-action@1.12.0 with: mongodb-version: '8.0.0-rc4' - name: Run tests @@ -230,7 +230,7 @@ jobs: # Test sdist on lowest supported Python python-version: '3.9' - name: Start MongoDB - uses: supercharge/mongodb-github-action@1.11.0 + uses: supercharge/mongodb-github-action@1.12.0 - name: Run connect test from sdist shell: bash run: | From a0de09efc6f7addaa68c5c2c1fc721690622db16 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 6 Jan 2025 06:58:50 -0600 Subject: [PATCH 142/182] Bump mypy from 1.13.0 to 1.14.1 (#2045) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/typing.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/typing.txt b/requirements/typing.txt index 189f4f8719..5a2f76f6bc 100644 --- a/requirements/typing.txt +++ b/requirements/typing.txt @@ -1,4 +1,4 @@ -mypy==1.13.0 +mypy==1.14.1 pyright==1.1.391 typing_extensions -r ./encryption.txt From fd5a10599b69338b6f5bf66542e06a550904e27d Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Mon, 6 Jan 2025 09:55:04 -0600 Subject: [PATCH 143/182] PYTHON-5016 Update scripts to handle Windows spawn hosts (#2047) --- .evergreen/setup-spawn-host.sh | 3 ++- .evergreen/sync-spawn-host.sh | 5 +++-- _setup.py | 5 +++++ pyproject.toml | 1 + 4 files changed, 11 insertions(+), 3 deletions(-) diff --git 
a/.evergreen/setup-spawn-host.sh b/.evergreen/setup-spawn-host.sh index 1a526c762c..4109e59183 100755 --- a/.evergreen/setup-spawn-host.sh +++ b/.evergreen/setup-spawn-host.sh @@ -8,7 +8,8 @@ if [ -z "$1" ] fi target=$1 -remote_dir=/home/ec2-user/mongo-python-driver +user=${target%@*} +remote_dir=/home/$user/mongo-python-driver echo "Copying files to $target..." rsync -az -e ssh --exclude '.git' --filter=':- .gitignore' -r . $target:$remote_dir diff --git a/.evergreen/sync-spawn-host.sh b/.evergreen/sync-spawn-host.sh index 3d6c968901..de3374a008 100755 --- a/.evergreen/sync-spawn-host.sh +++ b/.evergreen/sync-spawn-host.sh @@ -6,9 +6,10 @@ if [ -z "$1" ] fi target=$1 +user=${target%@*} echo "Syncing files to $target..." -rsync -haz -e ssh --exclude '.git' --filter=':- .gitignore' -r . $target:/home/ec2-user/mongo-python-driver +rsync -haz -e ssh --exclude '.git' --filter=':- .gitignore' -r . $target:/home/$user/mongo-python-driver # shellcheck disable=SC2034 -fswatch -o . | while read f; do rsync -hazv -e ssh --exclude '.git' --filter=':- .gitignore' -r . $target:/home/ec2-user/mongo-python-driver; done +fswatch -o . | while read f; do rsync -hazv -e ssh --exclude '.git' --filter=':- .gitignore' -r . $target:/home/$user/mongo-python-driver; done echo "Syncing files to $target... done." diff --git a/_setup.py b/_setup.py index 65ae1908fe..1a8b9e0246 100644 --- a/_setup.py +++ b/_setup.py @@ -82,6 +82,11 @@ def run(self): ) def build_extension(self, ext): + # "ProgramFiles(x86)" is not a valid environment variable in Cygwin but is needed for + # the MSVCCompiler in distutils. + if os.name == "nt": + if "ProgramFiles" in os.environ and "ProgramFiles(x86)" not in os.environ: + os.environ["ProgramFiles(x86)"] = os.environ["ProgramFiles"] + " (x86)" name = ext.name try: build_ext.build_extension(self, ext) diff --git a/pyproject.toml b/pyproject.toml index 9a29a777fc..a9977a382c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -212,6 +212,7 @@ dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?)|dummy.*)$" "tools/*.py" = ["T201"] "green_framework_test.py" = ["T201"] "hatch_build.py" = ["S"] +"_setup.py" = ["SIM112"] [tool.coverage.run] branch = true From da8c7aa4e057b95a46e1f9c8bedc53872db4dfd4 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Mon, 6 Jan 2025 14:25:36 -0600 Subject: [PATCH 144/182] PYTHON-5017 Use a separate PyPI publish step (#2042) --- .github/workflows/release-python.yml | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/.github/workflows/release-python.yml b/.github/workflows/release-python.yml index cee222d109..78f7ba1faf 100644 --- a/.github/workflows/release-python.yml +++ b/.github/workflows/release-python.yml @@ -67,6 +67,23 @@ jobs: publish: needs: [build-dist, static-scan] + name: Upload release to PyPI + runs-on: ubuntu-latest + environment: release + permissions: + id-token: write + steps: + - name: Download all the dists + uses: actions/download-artifact@v4 + with: + name: all-dist-${{ github.run_id }} + path: dist/ + - name: Publish package distributions to PyPI + if: startsWith(inputs.dry_run, 'false') + uses: pypa/gh-action-pypi-publish@release/v1 + + post-publish: + needs: [publish] runs-on: ubuntu-latest environment: release permissions: From c40283ed0926cd5a5191fab36bcb8cbf4d80cf5d Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Tue, 7 Jan 2025 12:48:28 -0600 Subject: [PATCH 145/182] PYTHON-4840 Add evergreen tests for free-threaded Python 3.13t (#2048) --- .evergreen/config.yml | 19 ++++++++++++++++- 
.evergreen/generated_configs/variants.yml | 26 +++++++++++++++++++++++ .evergreen/scripts/generate_config.py | 19 +++++++++++++++++ .evergreen/scripts/run-direct-tests.sh | 10 +++++++++ doc/changelog.rst | 3 +++ 5 files changed, 76 insertions(+), 1 deletion(-) create mode 100755 .evergreen/scripts/run-direct-tests.sh diff --git a/.evergreen/config.yml b/.evergreen/config.yml index aa0eee3620..f854f6bd3d 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -116,7 +116,6 @@ functions: content_type: text/html display_name: "Coverage Report HTML" - "upload mo artifacts": - command: ec2.assume_role params: @@ -300,6 +299,15 @@ functions: - .evergreen/scripts/run-with-env.sh - .evergreen/scripts/run-tests.sh + "run direct tests": + - command: subprocess.exec + type: test + params: + working_dir: "src" + binary: bash + include_expansions_in_env: ["PYTHON_BINARY"] + args: [ .evergreen/scripts/run-direct-tests.sh ] + "run enterprise auth tests": - command: subprocess.exec type: test @@ -920,6 +928,15 @@ tasks: commands: - func: "run tests" + - name: "free-threading" + tags: ["free-threading"] + commands: + - func: "bootstrap mongo-orchestration" + vars: + VERSION: "8.0" + TOPOLOGY: "replica_set" + - func: "run direct tests" + - name: "atlas-connect" tags: ["atlas-connect"] commands: diff --git a/.evergreen/generated_configs/variants.yml b/.evergreen/generated_configs/variants.yml index b77859bc91..b1db61d492 100644 --- a/.evergreen/generated_configs/variants.yml +++ b/.evergreen/generated_configs/variants.yml @@ -627,6 +627,32 @@ buildvariants: AUTH: auth PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 + # Free threaded tests + - name: free-threaded-rhel8-python3.13t + tasks: + - name: .free-threading + display_name: Free-threaded RHEL8 Python3.13t + run_on: + - rhel87-small + expansions: + PYTHON_BINARY: /opt/python/3.13t/bin/python3 + - name: free-threaded-macos-python3.13t + tasks: + - name: .free-threading + display_name: Free-threaded macOS Python3.13t + run_on: + - macos-14 + expansions: + PYTHON_BINARY: /Library/Frameworks/PythonT.Framework/Versions/3.13/bin/python3t + - name: free-threaded-macos-arm64-python3.13t + tasks: + - name: .free-threading + display_name: Free-threaded macOS Arm64 Python3.13t + run_on: + - macos-14-arm64 + expansions: + PYTHON_BINARY: /Library/Frameworks/PythonT.Framework/Versions/3.13/bin/python3t + # Green framework tests - name: green-eventlet-rhel8-python3.9 tasks: diff --git a/.evergreen/scripts/generate_config.py b/.evergreen/scripts/generate_config.py index 8a9ba8a206..e8d0b171bd 100644 --- a/.evergreen/scripts/generate_config.py +++ b/.evergreen/scripts/generate_config.py @@ -146,12 +146,16 @@ def get_python_binary(python: str, host: Host) -> str: else: base = "C:/python" python = python.replace(".", "") + if python == "313t": + return f"{base}/Python313/python3.13t.exe" return f"{base}/Python{python}/python.exe" if name in ["rhel8", "ubuntu22", "ubuntu20", "rhel7"]: return f"/opt/python/{python}/bin/python3" if name in ["macos", "macos-arm64"]: + if python == "3.13t": + return "/Library/Frameworks/PythonT.Framework/Versions/3.13/bin/python3t" return f"/Library/Frameworks/Python.Framework/Versions/{python}/bin/python3" raise ValueError(f"no match found for python {python} on {name}") @@ -318,6 +322,21 @@ def create_server_variants() -> list[BuildVariant]: return variants +def create_free_threaded_variants() -> list[BuildVariant]: + variants = [] + for host_name in ("rhel8", "macos", "macos-arm64", "win64"): + if host_name == "win64": + # TODO: 
PYTHON-5027 + continue + tasks = [".free-threading"] + host = HOSTS[host_name] + python = "3.13t" + display_name = get_display_name("Free-threaded", host, python=python) + variant = create_variant(tasks, display_name, python=python, host=host) + variants.append(variant) + return variants + + def create_encryption_variants() -> list[BuildVariant]: variants = [] tags = ["encryption_tag"] diff --git a/.evergreen/scripts/run-direct-tests.sh b/.evergreen/scripts/run-direct-tests.sh new file mode 100755 index 0000000000..a00235311c --- /dev/null +++ b/.evergreen/scripts/run-direct-tests.sh @@ -0,0 +1,10 @@ +#!/bin/bash +set -x +. .evergreen/utils.sh + +. .evergreen/scripts/env.sh +createvirtualenv "$PYTHON_BINARY" .venv + +export PYMONGO_C_EXT_MUST_BUILD=1 +pip install -e ".[test]" +pytest -v diff --git a/doc/changelog.rst b/doc/changelog.rst index d185e3c728..22c98f566c 100644 --- a/doc/changelog.rst +++ b/doc/changelog.rst @@ -16,6 +16,7 @@ PyMongo 4.11 brings a number of changes including: - pymongocrypt>=1.12 is now required for :ref:`In-Use Encryption` support. - Added support for free-threaded Python with the GIL disabled. For more information see: `Free-threaded CPython `_. + We do not yet support free-threaded Python on Windows (`PYTHON-5027`_) or with In-Use Encryption (`PYTHON-5024`_). - :attr:`~pymongo.asynchronous.mongo_client.AsyncMongoClient.address` and :attr:`~pymongo.mongo_client.MongoClient.address` now correctly block when called on unconnected clients until either connection succeeds or a server selection timeout error is raised. @@ -42,6 +43,8 @@ See the `PyMongo 4.11 release notes in JIRA`_ for the list of resolved issues in this release. .. _PyMongo 4.11 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=40784 +.. _PYTHON-5027: https://jira.mongodb.org/browse/PYTHON-5027 +.. _PYTHON-5024: https://jira.mongodb.org/browse/PYTHON-5024 Changes in Version 4.10.1 (2024/10/01) -------------------------------------- From 1f22139323c5ad863aeffbd25f112b0dd2940786 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Tue, 7 Jan 2025 12:49:52 -0600 Subject: [PATCH 146/182] PYTHON-4949 Communicate future minWireVersion bump / 4.0 EoL (#2050) --- doc/changelog.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/doc/changelog.rst b/doc/changelog.rst index 22c98f566c..fba6713bd9 100644 --- a/doc/changelog.rst +++ b/doc/changelog.rst @@ -7,6 +7,10 @@ Changes in Version 4.11.0 (YYYY/MM/DD) .. warning:: PyMongo 4.11 drops support for Python 3.8: Python 3.9+ or PyPy 3.9+ is now required. .. warning:: PyMongo 4.11 drops support for MongoDB 3.6. PyMongo now supports MongoDB 4.0+. Driver support for MongoDB 3.6 reached end of life in April 2024. +.. warning:: Driver support for MongoDB 4.0 reaches end of life in April 2025. + A future minor release of PyMongo will raise the minimum supported MongoDB Server version from 4.0 to 4.2. + This is in accordance with [MongoDB Software Lifecycle Schedules](https://www.mongodb.com/legal/support-policy/lifecycles). 
+ **Support for MongoDB Server 4.0 will be dropped in a future release!** PyMongo 4.11 brings a number of changes including: From 42df09c4b4cfc63fa02e4fbc5c1930c29aedeec7 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Wed, 8 Jan 2025 17:07:18 -0600 Subject: [PATCH 147/182] PYTHON-5017 Fix post-publish step (#2051) --- .github/workflows/release-python.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release-python.yml b/.github/workflows/release-python.yml index 78f7ba1faf..ee4ea32f82 100644 --- a/.github/workflows/release-python.yml +++ b/.github/workflows/release-python.yml @@ -102,7 +102,7 @@ jobs: aws_region_name: ${{ vars.AWS_REGION_NAME }} aws_secret_id: ${{ secrets.AWS_SECRET_ID }} artifactory_username: ${{ vars.ARTIFACTORY_USERNAME }} - - uses: mongodb-labs/drivers-github-tools/python/publish@v2 + - uses: mongodb-labs/drivers-github-tools/python/post-publish@v2 with: version: ${{ inputs.version }} following_version: ${{ inputs.following_version }} From 53943ac5391d5a9fcd2283dd7a6d061b0537ef99 Mon Sep 17 00:00:00 2001 From: Shane Harvey Date: Thu, 9 Jan 2025 13:36:56 -0800 Subject: [PATCH 148/182] PYTHON-5011 Fix behavior of TLS connection errors on PyPy (#2052) --- pymongo/network_layer.py | 64 ++++++++++++++++++++++++++++++++++------ 1 file changed, 55 insertions(+), 9 deletions(-) diff --git a/pymongo/network_layer.py b/pymongo/network_layer.py index beffba6d18..4d21300bc6 100644 --- a/pymongo/network_layer.py +++ b/pymongo/network_layer.py @@ -28,7 +28,7 @@ Union, ) -from pymongo import ssl_support +from pymongo import _csot, ssl_support from pymongo._asyncio_task import create_task from pymongo.errors import _OperationCancelled from pymongo.socket_checker import _errno_from_exception @@ -316,6 +316,42 @@ async def _async_receive(conn: socket.socket, length: int, loop: AbstractEventLo return mv +_PYPY = "PyPy" in sys.version + + +def wait_for_read(conn: Connection, deadline: Optional[float]) -> None: + """Block until at least one byte is read, or a timeout, or a cancel.""" + sock = conn.conn + timed_out = False + # Check if the connection's socket has been manually closed + if sock.fileno() == -1: + return + while True: + # SSLSocket can have buffered data which won't be caught by select. + if hasattr(sock, "pending") and sock.pending() > 0: + readable = True + else: + # Wait up to 500ms for the socket to become readable and then + # check for cancellation. + if deadline: + remaining = deadline - time.monotonic() + # When the timeout has expired perform one final check to + # see if the socket is readable. This helps avoid spurious + # timeouts on AWS Lambda and other FaaS environments. + if remaining <= 0: + timed_out = True + timeout = max(min(remaining, _POLL_TIMEOUT), 0) + else: + timeout = _POLL_TIMEOUT + readable = conn.socket_checker.select(sock, read=True, timeout=timeout) + if conn.cancel_context.cancelled: + raise _OperationCancelled("operation cancelled") + if readable: + return + if timed_out: + raise socket.timeout("timed out") + + def receive_data(conn: Connection, length: int, deadline: Optional[float]) -> memoryview: buf = bytearray(length) mv = memoryview(buf) @@ -324,18 +360,25 @@ def receive_data(conn: Connection, length: int, deadline: Optional[float]) -> me # check for the cancellation signal after each timeout. Alternatively we # could close the socket but that does not reliably cancel recv() calls # on all OSes. + # When the timeout has expired we perform one final non-blocking recv. 
+ # This helps avoid spurious timeouts when the response is actually already + # buffered on the client. orig_timeout = conn.conn.gettimeout() try: while bytes_read < length: - if deadline is not None: - # CSOT: Update timeout. When the timeout has expired perform one - # final non-blocking recv. This helps avoid spurious timeouts when - # the response is actually already buffered on the client. - short_timeout = min(max(deadline - time.monotonic(), 0), _POLL_TIMEOUT) - else: - short_timeout = _POLL_TIMEOUT - conn.set_conn_timeout(short_timeout) try: + # Use the legacy wait_for_read cancellation approach on PyPy due to PYTHON-5011. + if _PYPY: + wait_for_read(conn, deadline) + if _csot.get_timeout() and deadline is not None: + conn.set_conn_timeout(max(deadline - time.monotonic(), 0)) + else: + if deadline is not None: + short_timeout = min(max(deadline - time.monotonic(), 0), _POLL_TIMEOUT) + else: + short_timeout = _POLL_TIMEOUT + conn.set_conn_timeout(short_timeout) + chunk_length = conn.conn.recv_into(mv[bytes_read:]) except BLOCKING_IO_ERRORS: if conn.cancel_context.cancelled: @@ -345,6 +388,9 @@ def receive_data(conn: Connection, length: int, deadline: Optional[float]) -> me except socket.timeout: if conn.cancel_context.cancelled: raise _OperationCancelled("operation cancelled") from None + if _PYPY: + # We reached the true deadline. + raise continue except OSError as exc: if conn.cancel_context.cancelled: From 6c9a20a49d37bff2f2163011d1ccd686f83555a6 Mon Sep 17 00:00:00 2001 From: Shane Harvey Date: Fri, 10 Jan 2025 09:32:07 -0800 Subject: [PATCH 149/182] PYTHON-5014 Tests that use HTTPSConnection should only use stdlib ssl (#2053) --- test/asynchronous/test_encryption.py | 12 ++---------- test/test_encryption.py | 12 ++---------- 2 files changed, 4 insertions(+), 20 deletions(-) diff --git a/test/asynchronous/test_encryption.py b/test/asynchronous/test_encryption.py index 48f791ac16..10c4c8a564 100644 --- a/test/asynchronous/test_encryption.py +++ b/test/asynchronous/test_encryption.py @@ -98,7 +98,6 @@ WriteError, ) from pymongo.operations import InsertOne, ReplaceOne, UpdateOne -from pymongo.ssl_support import get_ssl_context from pymongo.write_concern import WriteConcern _IS_SYNC = False @@ -2879,15 +2878,8 @@ async def asyncSetUp(self): async def http_post(self, path, data=None): # Note, the connection to the mock server needs to be closed after # each request because the server is single threaded. 
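For context on the test change that follows: a client-side TLS context for a local mock
KMS endpoint can be built entirely from the standard library. The certificate paths
below are placeholders for whatever files the test suite ships, and relaxing hostname
checks is only reasonable against a loopback test server, never production traffic::

    import http.client
    import ssl

    ctx = ssl.create_default_context(cafile="ca.pem")  # trust the test CA
    ctx.load_cert_chain("client.pem")                  # present a client certificate
    ctx.check_hostname = False                         # loopback test server only
    ctx.verify_mode = ssl.CERT_NONE

    conn = http.client.HTTPSConnection("127.0.0.1:9003", context=ctx)
    try:
        conn.request("POST", "/kms", body=b"{}")       # assumes a listening mock server
        reply = conn.getresponse().read()
    finally:
        conn.close()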
-        ctx: ssl.SSLContext = get_ssl_context(
-            CLIENT_PEM,  # certfile
-            None,  # passphrase
-            CA_PEM,  # ca_certs
-            None,  # crlfile
-            False,  # allow_invalid_certificates
-            False,  # allow_invalid_hostnames
-            False,  # disable_ocsp_endpoint_check
-        )
+        ctx = ssl.create_default_context(cafile=CA_PEM)
+        ctx.load_cert_chain(CLIENT_PEM)
         conn = http.client.HTTPSConnection("127.0.0.1:9003", context=ctx)
         try:
             if data is not None:
diff --git a/test/test_encryption.py b/test/test_encryption.py
index daa5fd5d4c..7b5aa776e6 100644
--- a/test/test_encryption.py
+++ b/test/test_encryption.py
@@ -95,7 +95,6 @@
     WriteError,
 )
 from pymongo.operations import InsertOne, ReplaceOne, UpdateOne
-from pymongo.ssl_support import get_ssl_context
 from pymongo.synchronous import encryption
 from pymongo.synchronous.encryption import Algorithm, ClientEncryption, QueryType
 from pymongo.synchronous.mongo_client import MongoClient
@@ -2861,15 +2860,8 @@ def setUp(self):
     def http_post(self, path, data=None):
         # Note, the connection to the mock server needs to be closed after
         # each request because the server is single threaded.
-        ctx: ssl.SSLContext = get_ssl_context(
-            CLIENT_PEM,  # certfile
-            None,  # passphrase
-            CA_PEM,  # ca_certs
-            None,  # crlfile
-            False,  # allow_invalid_certificates
-            False,  # allow_invalid_hostnames
-            False,  # disable_ocsp_endpoint_check
-        )
+        ctx = ssl.create_default_context(cafile=CA_PEM)
+        ctx.load_cert_chain(CLIENT_PEM)
         conn = http.client.HTTPSConnection("127.0.0.1:9003", context=ctx)
         try:
             if data is not None:

From 493fc2ab3e237c2155fde4400002ed2aafe9b2be Mon Sep 17 00:00:00 2001
From: Steven Silvester
Date: Fri, 10 Jan 2025 13:05:57 -0600
Subject: [PATCH 150/182] PYTHON-5014 Fix handling of async socket errors in kms request (#2054)

---
 pymongo/asynchronous/encryption.py   |  9 ++++++++-
 pymongo/synchronous/encryption.py    |  9 ++++++++-
 test/asynchronous/test_encryption.py | 12 ++++++++----
 test/test_encryption.py              | 12 ++++++++----
 4 files changed, 32 insertions(+), 10 deletions(-)

diff --git a/pymongo/asynchronous/encryption.py b/pymongo/asynchronous/encryption.py
index 1cf165e6a2..98ab68527c 100644
--- a/pymongo/asynchronous/encryption.py
+++ b/pymongo/asynchronous/encryption.py
@@ -219,7 +219,14 @@ async def kms_request(self, kms_context: MongoCryptKmsContext) -> None:
                 # Wrap I/O errors in PyMongo exceptions.
                 if isinstance(exc, BLOCKING_IO_ERRORS):
                     exc = socket.timeout("timed out")
-                _raise_connection_failure(address, exc, timeout_details=_get_timeout_details(opts))
+                # Async raises an OSError instead of returning empty bytes.
+                if isinstance(exc, OSError):
+                    msg_prefix = "KMS connection closed"
+                else:
+                    msg_prefix = None
+                _raise_connection_failure(
+                    address, exc, msg_prefix=msg_prefix, timeout_details=_get_timeout_details(opts)
+                )
             finally:
                 conn.close()
         except MongoCryptError:
diff --git a/pymongo/synchronous/encryption.py b/pymongo/synchronous/encryption.py
index ef49855059..d41169861f 100644
--- a/pymongo/synchronous/encryption.py
+++ b/pymongo/synchronous/encryption.py
@@ -219,7 +219,14 @@ def kms_request(self, kms_context: MongoCryptKmsContext) -> None:
                 # Wrap I/O errors in PyMongo exceptions.
                 if isinstance(exc, BLOCKING_IO_ERRORS):
                     exc = socket.timeout("timed out")
-                _raise_connection_failure(address, exc, timeout_details=_get_timeout_details(opts))
+                # Async raises an OSError instead of returning empty bytes.
+ if isinstance(exc, OSError): + msg_prefix = "KMS connection closed" + else: + msg_prefix = None + _raise_connection_failure( + address, exc, msg_prefix=msg_prefix, timeout_details=_get_timeout_details(opts) + ) finally: conn.close() except MongoCryptError: diff --git a/test/asynchronous/test_encryption.py b/test/asynchronous/test_encryption.py index 10c4c8a564..ef53d8ccd5 100644 --- a/test/asynchronous/test_encryption.py +++ b/test/asynchronous/test_encryption.py @@ -2162,7 +2162,8 @@ async def test_01_aws(self): # 127.0.0.1:9001: ('Certificate does not contain any `subjectAltName`s.',) key["endpoint"] = "127.0.0.1:9001" with self.assertRaisesRegex( - EncryptionError, "IP address mismatch|wronghost|IPAddressMismatch|Certificate" + EncryptionError, + "IP address mismatch|wronghost|IPAddressMismatch|Certificate|SSL handshake failed", ): await self.client_encryption_invalid_hostname.create_data_key("aws", key) @@ -2179,7 +2180,8 @@ async def test_02_azure(self): await self.client_encryption_expired.create_data_key("azure", key) # Invalid cert hostname error. with self.assertRaisesRegex( - EncryptionError, "IP address mismatch|wronghost|IPAddressMismatch|Certificate" + EncryptionError, + "IP address mismatch|wronghost|IPAddressMismatch|Certificate|SSL handshake failed", ): await self.client_encryption_invalid_hostname.create_data_key("azure", key) @@ -2196,7 +2198,8 @@ async def test_03_gcp(self): await self.client_encryption_expired.create_data_key("gcp", key) # Invalid cert hostname error. with self.assertRaisesRegex( - EncryptionError, "IP address mismatch|wronghost|IPAddressMismatch|Certificate" + EncryptionError, + "IP address mismatch|wronghost|IPAddressMismatch|Certificate|SSL handshake failed", ): await self.client_encryption_invalid_hostname.create_data_key("gcp", key) @@ -2210,7 +2213,8 @@ async def test_04_kmip(self): await self.client_encryption_expired.create_data_key("kmip") # Invalid cert hostname error. with self.assertRaisesRegex( - EncryptionError, "IP address mismatch|wronghost|IPAddressMismatch|Certificate" + EncryptionError, + "IP address mismatch|wronghost|IPAddressMismatch|Certificate|SSL handshake failed", ): await self.client_encryption_invalid_hostname.create_data_key("kmip") diff --git a/test/test_encryption.py b/test/test_encryption.py index 7b5aa776e6..726463c41e 100644 --- a/test/test_encryption.py +++ b/test/test_encryption.py @@ -2154,7 +2154,8 @@ def test_01_aws(self): # 127.0.0.1:9001: ('Certificate does not contain any `subjectAltName`s.',) key["endpoint"] = "127.0.0.1:9001" with self.assertRaisesRegex( - EncryptionError, "IP address mismatch|wronghost|IPAddressMismatch|Certificate" + EncryptionError, + "IP address mismatch|wronghost|IPAddressMismatch|Certificate|SSL handshake failed", ): self.client_encryption_invalid_hostname.create_data_key("aws", key) @@ -2171,7 +2172,8 @@ def test_02_azure(self): self.client_encryption_expired.create_data_key("azure", key) # Invalid cert hostname error. with self.assertRaisesRegex( - EncryptionError, "IP address mismatch|wronghost|IPAddressMismatch|Certificate" + EncryptionError, + "IP address mismatch|wronghost|IPAddressMismatch|Certificate|SSL handshake failed", ): self.client_encryption_invalid_hostname.create_data_key("azure", key) @@ -2188,7 +2190,8 @@ def test_03_gcp(self): self.client_encryption_expired.create_data_key("gcp", key) # Invalid cert hostname error. 
with self.assertRaisesRegex( - EncryptionError, "IP address mismatch|wronghost|IPAddressMismatch|Certificate" + EncryptionError, + "IP address mismatch|wronghost|IPAddressMismatch|Certificate|SSL handshake failed", ): self.client_encryption_invalid_hostname.create_data_key("gcp", key) @@ -2202,7 +2205,8 @@ def test_04_kmip(self): self.client_encryption_expired.create_data_key("kmip") # Invalid cert hostname error. with self.assertRaisesRegex( - EncryptionError, "IP address mismatch|wronghost|IPAddressMismatch|Certificate" + EncryptionError, + "IP address mismatch|wronghost|IPAddressMismatch|Certificate|SSL handshake failed", ): self.client_encryption_invalid_hostname.create_data_key("kmip") From b9f4f796f1af1787b3fcf6d918abdd603d6d41e9 Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Mon, 13 Jan 2025 09:16:28 -0500 Subject: [PATCH 151/182] Revert "PYTHON-4915 - Add guidance on adding _id fields to documents to CRUD spec, reorder client.bulk_write generated _id fields" (#2055) Co-authored-by: Steven Silvester --- pymongo/message.py | 13 +-- test/asynchronous/test_client_bulk_write.py | 14 --- test/mockupdb/test_id_ordering.py | 94 --------------------- test/test_client_bulk_write.py | 14 --- 4 files changed, 1 insertion(+), 134 deletions(-) delete mode 100644 test/mockupdb/test_id_ordering.py diff --git a/pymongo/message.py b/pymongo/message.py index b6c00f06cb..10c9edb5cd 100644 --- a/pymongo/message.py +++ b/pymongo/message.py @@ -24,7 +24,6 @@ import datetime import random import struct -from collections import ChainMap from io import BytesIO as _BytesIO from typing import ( TYPE_CHECKING, @@ -1116,18 +1115,8 @@ def _check_doc_size_limits( # key and the index of its namespace within ns_info as its value. op_doc[op_type] = ns_info[namespace] # type: ignore[index] - # Since the data document itself is nested within the insert document - # it won't be automatically re-ordered by the BSON conversion. - # We use ChainMap here to make the _id field the first field instead. - doc_to_encode = op_doc - if real_op_type == "insert": - doc = op_doc["document"] - if not isinstance(doc, RawBSONDocument): - doc_to_encode = op_doc.copy() # type: ignore[attr-defined] # Shallow copy - doc_to_encode["document"] = ChainMap(doc, {"_id": doc["_id"]}) # type: ignore[index] - # Encode current operation doc and, if newly added, namespace doc. 
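The hunk above removes the ChainMap trick (the change was reverted), but the idea the
deleted comment describes is easy to see in isolation: ChainMap iterates its last
mapping first, so chaining a generated _id behind the user document yields _id as the
first encoded key without copying the document. A standalone illustration, not driver
code::

    from collections import ChainMap

    from bson import decode, encode
    from bson.objectid import ObjectId

    doc = {"x": 1, "y": 2}
    with_id = ChainMap(doc, {"_id": ObjectId()})

    print(list(with_id))                  # ['_id', 'x', 'y']
    print(list(decode(encode(with_id))))  # the BSON round-trip keeps that order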
- op_doc_encoded = _dict_to_bson(doc_to_encode, False, opts) + op_doc_encoded = _dict_to_bson(op_doc, False, opts) op_length = len(op_doc_encoded) if ns_doc: ns_doc_encoded = _dict_to_bson(ns_doc, False, opts) diff --git a/test/asynchronous/test_client_bulk_write.py b/test/asynchronous/test_client_bulk_write.py index a82629f495..282009f554 100644 --- a/test/asynchronous/test_client_bulk_write.py +++ b/test/asynchronous/test_client_bulk_write.py @@ -18,9 +18,6 @@ import os import sys -from bson import encode -from bson.raw_bson import RawBSONDocument - sys.path[0:0] = [""] from test.asynchronous import ( @@ -87,17 +84,6 @@ async def test_formats_write_error_correctly(self): self.assertEqual(write_error["idx"], 1) self.assertEqual(write_error["op"], {"insert": 0, "document": {"_id": 1}}) - @async_client_context.require_version_min(8, 0, 0, -24) - @async_client_context.require_no_serverless - async def test_raw_bson_not_inflated(self): - doc = RawBSONDocument(encode({"a": "b" * 100})) - models = [ - InsertOne(namespace="db.coll", document=doc), - ] - await self.client.bulk_write(models=models) - - self.assertIsNone(doc._RawBSONDocument__inflated_doc) - # https://github.com/mongodb/specifications/tree/master/source/crud/tests class TestClientBulkWriteCRUD(AsyncIntegrationTest): diff --git a/test/mockupdb/test_id_ordering.py b/test/mockupdb/test_id_ordering.py deleted file mode 100644 index 7e2c91d592..0000000000 --- a/test/mockupdb/test_id_ordering.py +++ /dev/null @@ -1,94 +0,0 @@ -# Copyright 2024-present MongoDB, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from __future__ import annotations - -from test import PyMongoTestCase - -import pytest - -from pymongo import InsertOne - -try: - from mockupdb import MockupDB, OpMsg, go, going - - _HAVE_MOCKUPDB = True -except ImportError: - _HAVE_MOCKUPDB = False - - -from bson.objectid import ObjectId - -pytestmark = pytest.mark.mockupdb - - -# https://github.com/mongodb/specifications/blob/master/source/crud/tests/README.md#16-generated-document-identifiers-are-the-first-field-in-their-document -class TestIdOrdering(PyMongoTestCase): - def test_16_generated_document_ids_are_first_field(self): - server = MockupDB() - server.autoresponds( - "hello", - isWritablePrimary=True, - msg="isdbgrid", - minWireVersion=0, - maxWireVersion=25, - helloOk=True, - serviceId=ObjectId(), - ) - server.run() - self.addCleanup(server.stop) - - # We also verify that the original document contains an _id field after each insert - document = {"x": 1} - - client = self.simple_client(server.uri, loadBalanced=True) - collection = client.db.coll - with going(collection.insert_one, document): - request = server.receives() - self.assertEqual("_id", next(iter(request["documents"][0]))) - request.reply({"ok": 1}) - self.assertIn("_id", document) - - document = {"x1": 1} - - with going(collection.bulk_write, [InsertOne(document)]): - request = server.receives() - self.assertEqual("_id", next(iter(request["documents"][0]))) - request.reply({"ok": 1}) - self.assertIn("_id", document) - - document = {"x2": 1} - with going(client.bulk_write, [InsertOne(namespace="db.coll", document=document)]): - request = server.receives() - self.assertEqual("_id", next(iter(request["ops"][0]["document"]))) - request.reply({"ok": 1}) - self.assertIn("_id", document) - - # Re-ordering user-supplied _id fields is not required by the spec, but PyMongo does it for performance reasons - with going(collection.insert_one, {"x": 1, "_id": 111}): - request = server.receives() - self.assertEqual("_id", next(iter(request["documents"][0]))) - request.reply({"ok": 1}) - - with going(collection.bulk_write, [InsertOne({"x1": 1, "_id": 1111})]): - request = server.receives() - self.assertEqual("_id", next(iter(request["documents"][0]))) - request.reply({"ok": 1}) - - with going( - client.bulk_write, [InsertOne(namespace="db.coll", document={"x2": 1, "_id": 11111})] - ): - request = server.receives() - self.assertEqual("_id", next(iter(request["ops"][0]["document"]))) - request.reply({"ok": 1}) diff --git a/test/test_client_bulk_write.py b/test/test_client_bulk_write.py index c1cc27c28a..f8d92668ea 100644 --- a/test/test_client_bulk_write.py +++ b/test/test_client_bulk_write.py @@ -18,9 +18,6 @@ import os import sys -from bson import encode -from bson.raw_bson import RawBSONDocument - sys.path[0:0] = [""] from test import ( @@ -87,17 +84,6 @@ def test_formats_write_error_correctly(self): self.assertEqual(write_error["idx"], 1) self.assertEqual(write_error["op"], {"insert": 0, "document": {"_id": 1}}) - @client_context.require_version_min(8, 0, 0, -24) - @client_context.require_no_serverless - def test_raw_bson_not_inflated(self): - doc = RawBSONDocument(encode({"a": "b" * 100})) - models = [ - InsertOne(namespace="db.coll", document=doc), - ] - self.client.bulk_write(models=models) - - self.assertIsNone(doc._RawBSONDocument__inflated_doc) - # https://github.com/mongodb/specifications/tree/master/source/crud/tests class TestClientBulkWriteCRUD(IntegrationTest): From ecf7ac77702e0ea1710e4d06082f618999ba3398 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Mon, 13 
Jan 2025 20:34:58 -0600 Subject: [PATCH 152/182] PYTHON-5013 Add NULL checks in InvalidDocument bson handling (#2049) --- bson/_cbsonmodule.c | 89 ++++++++++++++++++++++++++++----------------- test/test_bson.py | 28 ++++++++++++++ 2 files changed, 83 insertions(+), 34 deletions(-) diff --git a/bson/_cbsonmodule.c b/bson/_cbsonmodule.c index d91c7e0536..672f5eeda5 100644 --- a/bson/_cbsonmodule.c +++ b/bson/_cbsonmodule.c @@ -1644,6 +1644,56 @@ static int write_raw_doc(buffer_t buffer, PyObject* raw, PyObject* _raw_str) { return bytes_written; } + +/* Update Invalid Document error message to include doc. + */ +void handle_invalid_doc_error(PyObject* dict) { + PyObject *etype = NULL, *evalue = NULL, *etrace = NULL; + PyObject *msg = NULL, *dict_str = NULL, *new_msg = NULL; + PyErr_Fetch(&etype, &evalue, &etrace); + PyObject *InvalidDocument = _error("InvalidDocument"); + if (InvalidDocument == NULL) { + goto cleanup; + } + + if (evalue && PyErr_GivenExceptionMatches(etype, InvalidDocument)) { + PyObject *msg = PyObject_Str(evalue); + if (msg) { + // Prepend doc to the existing message + PyObject *dict_str = PyObject_Str(dict); + if (dict_str == NULL) { + goto cleanup; + } + const char * dict_str_utf8 = PyUnicode_AsUTF8(dict_str); + if (dict_str_utf8 == NULL) { + goto cleanup; + } + const char * msg_utf8 = PyUnicode_AsUTF8(msg); + if (msg_utf8 == NULL) { + goto cleanup; + } + PyObject *new_msg = PyUnicode_FromFormat("Invalid document %s | %s", dict_str_utf8, msg_utf8); + Py_DECREF(evalue); + Py_DECREF(etype); + etype = InvalidDocument; + InvalidDocument = NULL; + if (new_msg) { + evalue = new_msg; + } else { + evalue = msg; + } + } + PyErr_NormalizeException(&etype, &evalue, &etrace); + } +cleanup: + PyErr_Restore(etype, evalue, etrace); + Py_XDECREF(msg); + Py_XDECREF(InvalidDocument); + Py_XDECREF(dict_str); + Py_XDECREF(new_msg); +} + + /* returns the number of bytes written or 0 on failure */ int write_dict(PyObject* self, buffer_t buffer, PyObject* dict, unsigned char check_keys, @@ -1743,40 +1793,8 @@ int write_dict(PyObject* self, buffer_t buffer, while (PyDict_Next(dict, &pos, &key, &value)) { if (!decode_and_write_pair(self, buffer, key, value, check_keys, options, top_level)) { - if (PyErr_Occurred()) { - PyObject *etype = NULL, *evalue = NULL, *etrace = NULL; - PyErr_Fetch(&etype, &evalue, &etrace); - PyObject *InvalidDocument = _error("InvalidDocument"); - - if (top_level && InvalidDocument && PyErr_GivenExceptionMatches(etype, InvalidDocument)) { - - Py_DECREF(etype); - etype = InvalidDocument; - - if (evalue) { - PyObject *msg = PyObject_Str(evalue); - Py_DECREF(evalue); - - if (msg) { - // Prepend doc to the existing message - PyObject *dict_str = PyObject_Str(dict); - PyObject *new_msg = PyUnicode_FromFormat("Invalid document %s | %s", PyUnicode_AsUTF8(dict_str), PyUnicode_AsUTF8(msg)); - Py_DECREF(dict_str); - - if (new_msg) { - evalue = new_msg; - } - else { - evalue = msg; - } - } - } - PyErr_NormalizeException(&etype, &evalue, &etrace); - } - else { - Py_DECREF(InvalidDocument); - } - PyErr_Restore(etype, evalue, etrace); + if (PyErr_Occurred() && top_level) { + handle_invalid_doc_error(dict); } return 0; } @@ -1796,6 +1814,9 @@ int write_dict(PyObject* self, buffer_t buffer, } if (!decode_and_write_pair(self, buffer, key, value, check_keys, options, top_level)) { + if (PyErr_Occurred() && top_level) { + handle_invalid_doc_error(dict); + } Py_DECREF(key); Py_DECREF(value); Py_DECREF(iter); diff --git a/test/test_bson.py b/test/test_bson.py index e550b538d3..e601be4915 
100644 --- a/test/test_bson.py +++ b/test/test_bson.py @@ -1112,6 +1112,34 @@ def __repr__(self): with self.assertRaisesRegex(InvalidDocument, f"Invalid document {doc}"): encode(doc) + def test_doc_in_invalid_document_error_message_mapping(self): + class MyMapping(abc.Mapping): + def keys(): + return ["t"] + + def __getitem__(self, name): + if name == "_id": + return None + return Wrapper(name) + + def __len__(self): + return 1 + + def __iter__(self): + return iter(["t"]) + + class Wrapper: + def __init__(self, val): + self.val = val + + def __repr__(self): + return repr(self.val) + + self.assertEqual("1", repr(Wrapper(1))) + doc = MyMapping() + with self.assertRaisesRegex(InvalidDocument, f"Invalid document {doc}"): + encode(doc) + class TestCodecOptions(unittest.TestCase): def test_document_class(self): From 069ebf3e13770e43e0c95f576b6c026209d1549c Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Wed, 15 Jan 2025 07:54:52 -0500 Subject: [PATCH 153/182] PYTHON-5037 - Update pyopenssl_context.__get_options type hint (#2060) --- pymongo/pyopenssl_context.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pymongo/pyopenssl_context.py b/pymongo/pyopenssl_context.py index a320e94929..8c643394b2 100644 --- a/pymongo/pyopenssl_context.py +++ b/pymongo/pyopenssl_context.py @@ -273,7 +273,7 @@ def __set_check_ocsp_endpoint(self, value: bool) -> None: check_ocsp_endpoint = property(__get_check_ocsp_endpoint, __set_check_ocsp_endpoint) - def __get_options(self) -> None: + def __get_options(self) -> int: # Calling set_options adds the option to the existing bitmask and # returns the new bitmask. # https://www.pyopenssl.org/en/stable/api/ssl.html#OpenSSL.SSL.Context.set_options From f8bd891df4415e9d793c5146a1126cb5a4c14141 Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Wed, 15 Jan 2025 09:53:45 -0500 Subject: [PATCH 154/182] =?UTF-8?q?PYTHON-5039=20-=20Always=20use=20asynci?= =?UTF-8?q?o.get=5Frunning=5Floop()=20instead=20of=20asynci=E2=80=A6=20(#2?= =?UTF-8?q?063)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- pymongo/network_layer.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pymongo/network_layer.py b/pymongo/network_layer.py index 4d21300bc6..c1db31f89c 100644 --- a/pymongo/network_layer.py +++ b/pymongo/network_layer.py @@ -72,7 +72,7 @@ async def async_sendall(sock: Union[socket.socket, _sslConn], buf: bytes) -> None: timeout = sock.gettimeout() sock.settimeout(0.0) - loop = asyncio.get_event_loop() + loop = asyncio.get_running_loop() try: if _HAVE_SSL and isinstance(sock, (SSLSocket, _sslConn)): await asyncio.wait_for(_async_sendall_ssl(sock, buf, loop), timeout=timeout) @@ -259,7 +259,7 @@ async def async_receive_data( timeout = sock_timeout sock.settimeout(0.0) - loop = asyncio.get_event_loop() + loop = asyncio.get_running_loop() cancellation_task = create_task(_poll_cancellation(conn)) try: if _HAVE_SSL and isinstance(sock, (SSLSocket, _sslConn)): @@ -290,7 +290,7 @@ async def async_receive_data_socket( timeout = sock_timeout sock.settimeout(0.0) - loop = asyncio.get_event_loop() + loop = asyncio.get_running_loop() try: if _HAVE_SSL and isinstance(sock, (SSLSocket, _sslConn)): return await asyncio.wait_for( From 820701f15ad9a41d0bab01f69291b368798a512d Mon Sep 17 00:00:00 2001 From: Shane Harvey Date: Wed, 15 Jan 2025 12:23:12 -0800 Subject: [PATCH 155/182] PYTHON-5038 Resolve certificate verify failed: Missing Authority Key Identifier (#2062) --- test/asynchronous/test_encryption.py | 2 ++ 
test/test_encryption.py | 2 ++ 2 files changed, 4 insertions(+) diff --git a/test/asynchronous/test_encryption.py b/test/asynchronous/test_encryption.py index ef53d8ccd5..2b22bd8b76 100644 --- a/test/asynchronous/test_encryption.py +++ b/test/asynchronous/test_encryption.py @@ -2884,6 +2884,8 @@ async def http_post(self, path, data=None): # each request because the server is single threaded. ctx = ssl.create_default_context(cafile=CA_PEM) ctx.load_cert_chain(CLIENT_PEM) + ctx.check_hostname = False + ctx.verify_mode = ssl.CERT_NONE conn = http.client.HTTPSConnection("127.0.0.1:9003", context=ctx) try: if data is not None: diff --git a/test/test_encryption.py b/test/test_encryption.py index 726463c41e..9224310144 100644 --- a/test/test_encryption.py +++ b/test/test_encryption.py @@ -2866,6 +2866,8 @@ def http_post(self, path, data=None): # each request because the server is single threaded. ctx = ssl.create_default_context(cafile=CA_PEM) ctx.load_cert_chain(CLIENT_PEM) + ctx.check_hostname = False + ctx.verify_mode = ssl.CERT_NONE conn = http.client.HTTPSConnection("127.0.0.1:9003", context=ctx) try: if data is not None: From 8fa6750a7e077c33f304e72f3e17467ced9cd224 Mon Sep 17 00:00:00 2001 From: Shane Harvey Date: Thu, 16 Jan 2025 12:14:25 -0800 Subject: [PATCH 156/182] PYTHON-5042 Resync transaction spec tests (#2058) --- .../unified/commit-retry.json | 5 ----- .../unified/commit-writeconcernerror.json | 17 +---------------- .../transactions/unified/findOneAndReplace.json | 8 ++++++-- test/transactions/unified/findOneAndUpdate.json | 16 ++++++++++++---- .../unified/mongos-recovery-token.json | 6 ++++-- test/transactions/unified/pin-mongos.json | 6 ++++-- test/transactions/unified/retryable-commit.json | 5 ----- test/transactions/unified/write-concern.json | 8 ++++++-- 8 files changed, 33 insertions(+), 38 deletions(-) diff --git a/test/transactions-convenient-api/unified/commit-retry.json b/test/transactions-convenient-api/unified/commit-retry.json index cc80201167..928f0167e4 100644 --- a/test/transactions-convenient-api/unified/commit-retry.json +++ b/test/transactions-convenient-api/unified/commit-retry.json @@ -422,11 +422,6 @@ }, { "description": "commit is not retried after MaxTimeMSExpired error", - "runOnRequirements": [ - { - "serverless": "forbid" - } - ], "operations": [ { "name": "failPoint", diff --git a/test/transactions-convenient-api/unified/commit-writeconcernerror.json b/test/transactions-convenient-api/unified/commit-writeconcernerror.json index a455a450bf..a6f6e6bd7f 100644 --- a/test/transactions-convenient-api/unified/commit-writeconcernerror.json +++ b/test/transactions-convenient-api/unified/commit-writeconcernerror.json @@ -1,6 +1,6 @@ { "description": "commit-writeconcernerror", - "schemaVersion": "1.4", + "schemaVersion": "1.3", "runOnRequirements": [ { "minServerVersion": "4.0", @@ -414,11 +414,6 @@ }, { "description": "commitTransaction is not retried after UnknownReplWriteConcern error", - "runOnRequirements": [ - { - "serverless": "forbid" - } - ], "operations": [ { "name": "failPoint", @@ -551,11 +546,6 @@ }, { "description": "commitTransaction is not retried after UnsatisfiableWriteConcern error", - "runOnRequirements": [ - { - "serverless": "forbid" - } - ], "operations": [ { "name": "failPoint", @@ -688,11 +678,6 @@ }, { "description": "commitTransaction is not retried after MaxTimeMSExpired error", - "runOnRequirements": [ - { - "serverless": "forbid" - } - ], "operations": [ { "name": "failPoint", diff --git 
a/test/transactions/unified/findOneAndReplace.json b/test/transactions/unified/findOneAndReplace.json index d9248244b3..f0742f0c60 100644 --- a/test/transactions/unified/findOneAndReplace.json +++ b/test/transactions/unified/findOneAndReplace.json @@ -127,7 +127,9 @@ "update": { "x": 1 }, - "new": false, + "new": { + "$$unsetOrMatches": false + }, "lsid": { "$$sessionLsid": "session0" }, @@ -299,7 +301,9 @@ "update": { "x": 1 }, - "new": false, + "new": { + "$$unsetOrMatches": false + }, "lsid": { "$$sessionLsid": "session0" }, diff --git a/test/transactions/unified/findOneAndUpdate.json b/test/transactions/unified/findOneAndUpdate.json index 34a40bb570..f5308efef3 100644 --- a/test/transactions/unified/findOneAndUpdate.json +++ b/test/transactions/unified/findOneAndUpdate.json @@ -189,7 +189,9 @@ "x": 1 } }, - "new": false, + "new": { + "$$unsetOrMatches": false + }, "lsid": { "$$sessionLsid": "session0" }, @@ -281,7 +283,9 @@ "x": 1 } }, - "new": false, + "new": { + "$$unsetOrMatches": false + }, "lsid": { "$$sessionLsid": "session0" }, @@ -340,7 +344,9 @@ "x": 1 } }, - "new": false, + "new": { + "$$unsetOrMatches": false + }, "lsid": { "$$sessionLsid": "session0" }, @@ -485,7 +491,9 @@ "x": 1 } }, - "new": false, + "new": { + "$$unsetOrMatches": false + }, "lsid": { "$$sessionLsid": "session0" }, diff --git a/test/transactions/unified/mongos-recovery-token.json b/test/transactions/unified/mongos-recovery-token.json index 00909c4218..bb88aa16bd 100644 --- a/test/transactions/unified/mongos-recovery-token.json +++ b/test/transactions/unified/mongos-recovery-token.json @@ -232,7 +232,8 @@ "id": "client1", "useMultipleMongoses": true, "uriOptions": { - "heartbeatFrequencyMS": 30000 + "heartbeatFrequencyMS": 30000, + "appName": "transactionsClient" }, "observeEvents": [ "commandStartedEvent" @@ -299,7 +300,8 @@ "isMaster", "hello" ], - "closeConnection": true + "closeConnection": true, + "appName": "transactionsClient" } } } diff --git a/test/transactions/unified/pin-mongos.json b/test/transactions/unified/pin-mongos.json index 5f2ecca5c1..c96f3f341f 100644 --- a/test/transactions/unified/pin-mongos.json +++ b/test/transactions/unified/pin-mongos.json @@ -1249,7 +1249,8 @@ "id": "client1", "useMultipleMongoses": true, "uriOptions": { - "heartbeatFrequencyMS": 30000 + "heartbeatFrequencyMS": 30000, + "appName": "transactionsClient" }, "observeEvents": [ "commandStartedEvent" @@ -1316,7 +1317,8 @@ "isMaster", "hello" ], - "closeConnection": true + "closeConnection": true, + "appName": "transactionsClient" } } } diff --git a/test/transactions/unified/retryable-commit.json b/test/transactions/unified/retryable-commit.json index 7d7e52495d..b794c1c55c 100644 --- a/test/transactions/unified/retryable-commit.json +++ b/test/transactions/unified/retryable-commit.json @@ -89,11 +89,6 @@ "tests": [ { "description": "commitTransaction fails after Interrupted", - "runOnRequirements": [ - { - "serverless": "forbid" - } - ], "operations": [ { "object": "testRunner", diff --git a/test/transactions/unified/write-concern.json b/test/transactions/unified/write-concern.json index 7acdd54066..29d1977a82 100644 --- a/test/transactions/unified/write-concern.json +++ b/test/transactions/unified/write-concern.json @@ -1417,7 +1417,9 @@ "update": { "x": 1 }, - "new": false, + "new": { + "$$unsetOrMatches": false + }, "lsid": { "$$sessionLsid": "session0" }, @@ -1522,7 +1524,9 @@ "x": 1 } }, - "new": false, + "new": { + "$$unsetOrMatches": false + }, "lsid": { "$$sessionLsid": "session0" }, From 
e4d84494c321f2532c93cf4d5cc815311f396e03 Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Fri, 17 Jan 2025 09:46:48 -0500 Subject: [PATCH 157/182] PYTHON-5021 - Fix usages of getaddrinfo to be non-blocking (#2059) --- pymongo/asynchronous/auth.py | 18 +++++++++++++----- pymongo/asynchronous/helpers.py | 20 ++++++++++++++++++++ pymongo/asynchronous/pool.py | 8 ++++---- pymongo/synchronous/auth.py | 14 +++++++++++--- pymongo/synchronous/helpers.py | 20 ++++++++++++++++++++ pymongo/synchronous/pool.py | 4 ++-- test/asynchronous/test_auth.py | 4 ++-- 7 files changed, 72 insertions(+), 16 deletions(-) diff --git a/pymongo/asynchronous/auth.py b/pymongo/asynchronous/auth.py index 48ce4bbd39..b1e6d0125b 100644 --- a/pymongo/asynchronous/auth.py +++ b/pymongo/asynchronous/auth.py @@ -38,6 +38,7 @@ _authenticate_oidc, _get_authenticator, ) +from pymongo.asynchronous.helpers import _getaddrinfo from pymongo.auth_shared import ( MongoCredential, _authenticate_scram_start, @@ -177,15 +178,22 @@ def _auth_key(nonce: str, username: str, password: str) -> str: return md5hash.hexdigest() -def _canonicalize_hostname(hostname: str, option: str | bool) -> str: +async def _canonicalize_hostname(hostname: str, option: str | bool) -> str: """Canonicalize hostname following MIT-krb5 behavior.""" # https://github.com/krb5/krb5/blob/d406afa363554097ac48646a29249c04f498c88e/src/util/k5test.py#L505-L520 if option in [False, "none"]: return hostname - af, socktype, proto, canonname, sockaddr = socket.getaddrinfo( - hostname, None, 0, 0, socket.IPPROTO_TCP, socket.AI_CANONNAME - )[0] + af, socktype, proto, canonname, sockaddr = ( + await _getaddrinfo( + hostname, + None, + family=0, + type=0, + proto=socket.IPPROTO_TCP, + flags=socket.AI_CANONNAME, + ) + )[0] # type: ignore[index] # For forward just to resolve the cname as dns.lookup() will not return it. if option == "forward": @@ -213,7 +221,7 @@ async def _authenticate_gssapi(credentials: MongoCredential, conn: AsyncConnecti # Starting here and continuing through the while loop below - establish # the security context. See RFC 4752, Section 3.1, first paragraph. 
host = props.service_host or conn.address[0] - host = _canonicalize_hostname(host, props.canonicalize_host_name) + host = await _canonicalize_hostname(host, props.canonicalize_host_name) service = props.service_name + "@" + host if props.service_realm is not None: service = service + "@" + props.service_realm diff --git a/pymongo/asynchronous/helpers.py b/pymongo/asynchronous/helpers.py index 1ac8b6630f..d519e8749c 100644 --- a/pymongo/asynchronous/helpers.py +++ b/pymongo/asynchronous/helpers.py @@ -15,7 +15,9 @@ """Miscellaneous pieces that need to be synchronized.""" from __future__ import annotations +import asyncio import builtins +import socket import sys from typing import ( Any, @@ -68,6 +70,24 @@ async def inner(*args: Any, **kwargs: Any) -> Any: return cast(F, inner) +async def _getaddrinfo( + host: Any, port: Any, **kwargs: Any +) -> list[ + tuple[ + socket.AddressFamily, + socket.SocketKind, + int, + str, + tuple[str, int] | tuple[str, int, int, int], + ] +]: + if not _IS_SYNC: + loop = asyncio.get_running_loop() + return await loop.getaddrinfo(host, port, **kwargs) # type: ignore[return-value] + else: + return socket.getaddrinfo(host, port, **kwargs) + + if sys.version_info >= (3, 10): anext = builtins.anext aiter = builtins.aiter diff --git a/pymongo/asynchronous/pool.py b/pymongo/asynchronous/pool.py index 5dc5675a0a..bf2f2b4946 100644 --- a/pymongo/asynchronous/pool.py +++ b/pymongo/asynchronous/pool.py @@ -40,7 +40,7 @@ from bson import DEFAULT_CODEC_OPTIONS from pymongo import _csot, helpers_shared from pymongo.asynchronous.client_session import _validate_session_write_concern -from pymongo.asynchronous.helpers import _handle_reauth +from pymongo.asynchronous.helpers import _getaddrinfo, _handle_reauth from pymongo.asynchronous.network import command, receive_message from pymongo.common import ( MAX_BSON_SIZE, @@ -783,7 +783,7 @@ def __repr__(self) -> str: ) -def _create_connection(address: _Address, options: PoolOptions) -> socket.socket: +async def _create_connection(address: _Address, options: PoolOptions) -> socket.socket: """Given (host, port) and PoolOptions, connect and return a socket object. Can raise socket.error. @@ -814,7 +814,7 @@ def _create_connection(address: _Address, options: PoolOptions) -> socket.socket family = socket.AF_UNSPEC err = None - for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM): + for res in await _getaddrinfo(host, port, family=family, type=socket.SOCK_STREAM): # type: ignore[attr-defined] af, socktype, proto, dummy, sa = res # SOCK_CLOEXEC was new in CPython 3.2, and only available on a limited # number of platforms (newer Linux and *BSD). Starting with CPython 3.4 @@ -863,7 +863,7 @@ async def _configured_socket( Sets socket's SSL and timeout options. 
""" - sock = _create_connection(address, options) + sock = await _create_connection(address, options) ssl_context = options._ssl_context if ssl_context is None: diff --git a/pymongo/synchronous/auth.py b/pymongo/synchronous/auth.py index 0e51ff8b7f..56860eff3b 100644 --- a/pymongo/synchronous/auth.py +++ b/pymongo/synchronous/auth.py @@ -45,6 +45,7 @@ _authenticate_oidc, _get_authenticator, ) +from pymongo.synchronous.helpers import _getaddrinfo if TYPE_CHECKING: from pymongo.hello import Hello @@ -180,9 +181,16 @@ def _canonicalize_hostname(hostname: str, option: str | bool) -> str: if option in [False, "none"]: return hostname - af, socktype, proto, canonname, sockaddr = socket.getaddrinfo( - hostname, None, 0, 0, socket.IPPROTO_TCP, socket.AI_CANONNAME - )[0] + af, socktype, proto, canonname, sockaddr = ( + _getaddrinfo( + hostname, + None, + family=0, + type=0, + proto=socket.IPPROTO_TCP, + flags=socket.AI_CANONNAME, + ) + )[0] # type: ignore[index] # For forward just to resolve the cname as dns.lookup() will not return it. if option == "forward": diff --git a/pymongo/synchronous/helpers.py b/pymongo/synchronous/helpers.py index 064583dad3..f800e7dcc8 100644 --- a/pymongo/synchronous/helpers.py +++ b/pymongo/synchronous/helpers.py @@ -15,7 +15,9 @@ """Miscellaneous pieces that need to be synchronized.""" from __future__ import annotations +import asyncio import builtins +import socket import sys from typing import ( Any, @@ -68,6 +70,24 @@ def inner(*args: Any, **kwargs: Any) -> Any: return cast(F, inner) +def _getaddrinfo( + host: Any, port: Any, **kwargs: Any +) -> list[ + tuple[ + socket.AddressFamily, + socket.SocketKind, + int, + str, + tuple[str, int] | tuple[str, int, int, int], + ] +]: + if not _IS_SYNC: + loop = asyncio.get_running_loop() + return loop.getaddrinfo(host, port, **kwargs) # type: ignore[return-value] + else: + return socket.getaddrinfo(host, port, **kwargs) + + if sys.version_info >= (3, 10): next = builtins.next iter = builtins.iter diff --git a/pymongo/synchronous/pool.py b/pymongo/synchronous/pool.py index 1a155c82d7..05f930d480 100644 --- a/pymongo/synchronous/pool.py +++ b/pymongo/synchronous/pool.py @@ -84,7 +84,7 @@ from pymongo.socket_checker import SocketChecker from pymongo.ssl_support import HAS_SNI, SSLError from pymongo.synchronous.client_session import _validate_session_write_concern -from pymongo.synchronous.helpers import _handle_reauth +from pymongo.synchronous.helpers import _getaddrinfo, _handle_reauth from pymongo.synchronous.network import command, receive_message if TYPE_CHECKING: @@ -812,7 +812,7 @@ def _create_connection(address: _Address, options: PoolOptions) -> socket.socket family = socket.AF_UNSPEC err = None - for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM): + for res in _getaddrinfo(host, port, family=family, type=socket.SOCK_STREAM): # type: ignore[attr-defined] af, socktype, proto, dummy, sa = res # SOCK_CLOEXEC was new in CPython 3.2, and only available on a limited # number of platforms (newer Linux and *BSD). Starting with CPython 3.4 diff --git a/test/asynchronous/test_auth.py b/test/asynchronous/test_auth.py index 08dc4d7247..7172152d69 100644 --- a/test/asynchronous/test_auth.py +++ b/test/asynchronous/test_auth.py @@ -275,10 +275,10 @@ async def test_gssapi_threaded(self): async def test_gssapi_canonicalize_host_name(self): # Test the low level method. 
assert GSSAPI_HOST is not None - result = _canonicalize_hostname(GSSAPI_HOST, "forward") + result = await _canonicalize_hostname(GSSAPI_HOST, "forward") if "compute-1.amazonaws.com" not in result: self.assertEqual(result, GSSAPI_HOST) - result = _canonicalize_hostname(GSSAPI_HOST, "forwardAndReverse") + result = await _canonicalize_hostname(GSSAPI_HOST, "forwardAndReverse") self.assertEqual(result, GSSAPI_HOST) # Use the equivalent named CANONICALIZE_HOST_NAME. From 86084adb29537bd6c432913346f3d14b59c90a69 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Fri, 17 Jan 2025 14:48:01 -0600 Subject: [PATCH 158/182] PYTHON-4975 Use justfile as the task runner (#2057) Co-authored-by: Noah Stapp --- .evergreen/combine-coverage.sh | 2 +- .evergreen/hatch.sh | 5 -- .evergreen/install-dependencies.sh | 27 +++++++ .evergreen/just.sh | 5 ++ .evergreen/run-azurekms-fail-test.sh | 2 +- .evergreen/run-azurekms-test.sh | 2 +- .evergreen/run-gcpkms-test.sh | 2 +- .evergreen/run-import-time-test.sh | 2 +- .evergreen/run-mongodb-aws-ecs-test.sh | 2 +- .evergreen/run-mongodb-oidc-test.sh | 2 +- .evergreen/run-perf-tests.sh | 2 +- .evergreen/scripts/cleanup.sh | 2 +- .evergreen/scripts/configure-env.sh | 8 ++- .evergreen/scripts/ensure-hatch.sh | 59 --------------- .evergreen/scripts/install-dependencies.sh | 2 +- .evergreen/scripts/run-atlas-tests.sh | 2 +- .evergreen/scripts/run-doctests.sh | 2 +- .../scripts/run-enterprise-auth-tests.sh | 2 +- .evergreen/scripts/run-gcpkms-fail-test.sh | 2 +- .evergreen/scripts/run-mockupdb-tests.sh | 2 +- .evergreen/scripts/run-mongodb-aws-test.sh | 2 +- .evergreen/scripts/run-ocsp-test.sh | 2 +- .evergreen/scripts/run-tests.sh | 2 +- .evergreen/scripts/setup-dev-env.sh | 72 +++++++++++++++++++ .evergreen/scripts/setup-encryption.sh | 2 +- .evergreen/setup-spawn-host.sh | 2 +- .evergreen/utils.sh | 4 +- .github/workflows/test-python.yml | 50 +++++++------ .gitignore | 2 + CONTRIBUTING.md | 37 +++++----- hatch.toml | 13 ---- justfile | 69 ++++++++++++++++++ 32 files changed, 252 insertions(+), 139 deletions(-) delete mode 100755 .evergreen/hatch.sh create mode 100755 .evergreen/just.sh delete mode 100755 .evergreen/scripts/ensure-hatch.sh create mode 100755 .evergreen/scripts/setup-dev-env.sh create mode 100644 justfile diff --git a/.evergreen/combine-coverage.sh b/.evergreen/combine-coverage.sh index 92d2f1f1f8..c31f755bd9 100755 --- a/.evergreen/combine-coverage.sh +++ b/.evergreen/combine-coverage.sh @@ -8,7 +8,7 @@ set -o errexit # Exit the script with error if any of the commands fail . .evergreen/utils.sh -if [ -z "$PYTHON_BINARY" ]; then +if [ -z "${PYTHON_BINARY:-}" ]; then PYTHON_BINARY=$(find_python3) fi diff --git a/.evergreen/hatch.sh b/.evergreen/hatch.sh deleted file mode 100755 index c01dfcd19e..0000000000 --- a/.evergreen/hatch.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash -set -eu - -. .evergreen/scripts/ensure-hatch.sh -hatch run "$@" diff --git a/.evergreen/install-dependencies.sh b/.evergreen/install-dependencies.sh index e6dceb33fc..8773fa2c6d 100755 --- a/.evergreen/install-dependencies.sh +++ b/.evergreen/install-dependencies.sh @@ -13,6 +13,33 @@ else SUDO="sudo" fi +# Install just. +# On Evergreen jobs, "CI" will be set, and we don't want to write to $HOME. +if [ "${CI:-}" == "true" ]; then + BIN_DIR=$DRIVERS_TOOLS_BINARIES +else + BIN_DIR=$HOME/.local/bin +fi +if [ ! 
-f $BIN_DIR/just ]; then + if [ "Windows_NT" = "${OS:-}" ]; then + TARGET="--target x86_64-pc-windows-msvc" + else + TARGET="" + fi + curl --proto '=https' --tlsv1.2 -sSf https://just.systems/install.sh | bash -s -- $TARGET --to "$BIN_DIR" || { + # CARGO_HOME is defined in configure-env.sh + export CARGO_HOME=${CARGO_HOME:-$HOME/.cargo/} + export RUSTUP_HOME="${CARGO_HOME}/.rustup" + . ${DRIVERS_TOOLS}/.evergreen/install-rust.sh + cargo install just + if [ "Windows_NT" = "${OS:-}" ]; then + mv $CARGO_HOME/just.exe $BIN_DIR/just + else + mv $CARGO_HOME/just $BIN_DIR + fi + } +fi + # Add 'server' and 'hostname_not_in_cert' as a hostnames echo "127.0.0.1 server" | $SUDO tee -a /etc/hosts echo "127.0.0.1 hostname_not_in_cert" | $SUDO tee -a /etc/hosts diff --git a/.evergreen/just.sh b/.evergreen/just.sh new file mode 100755 index 0000000000..bebbca8282 --- /dev/null +++ b/.evergreen/just.sh @@ -0,0 +1,5 @@ +#!/bin/bash +set -eu + +. .evergreen/scripts/setup-dev-env.sh +just "$@" diff --git a/.evergreen/run-azurekms-fail-test.sh b/.evergreen/run-azurekms-fail-test.sh index d99c178fb9..d1117dcb32 100755 --- a/.evergreen/run-azurekms-fail-test.sh +++ b/.evergreen/run-azurekms-fail-test.sh @@ -8,5 +8,5 @@ PYTHON_BINARY=/opt/mongodbtoolchain/v4/bin/python3 \ KEY_NAME="${AZUREKMS_KEYNAME}" \ KEY_VAULT_ENDPOINT="${AZUREKMS_KEYVAULTENDPOINT}" \ SUCCESS=false TEST_FLE_AZURE_AUTO=1 \ - $HERE/hatch.sh test:test-eg + $HERE/just.sh test-eg bash $HERE/teardown-encryption.sh diff --git a/.evergreen/run-azurekms-test.sh b/.evergreen/run-azurekms-test.sh index bb515a9386..d5c332fa8d 100755 --- a/.evergreen/run-azurekms-test.sh +++ b/.evergreen/run-azurekms-test.sh @@ -18,7 +18,7 @@ AZUREKMS_CMD="tar xf mongo-python-driver.tgz" \ $DRIVERS_TOOLS/.evergreen/csfle/azurekms/run-command.sh echo "Untarring file ... end" echo "Running test ... begin" -AZUREKMS_CMD="KEY_NAME=\"$AZUREKMS_KEYNAME\" KEY_VAULT_ENDPOINT=\"$AZUREKMS_KEYVAULTENDPOINT\" SUCCESS=true TEST_FLE_AZURE_AUTO=1 ./.evergreen/hatch.sh test:test-eg" \ +AZUREKMS_CMD="KEY_NAME=\"$AZUREKMS_KEYNAME\" KEY_VAULT_ENDPOINT=\"$AZUREKMS_KEYVAULTENDPOINT\" SUCCESS=true TEST_FLE_AZURE_AUTO=1 ./.evergreen/just.sh test-eg" \ $DRIVERS_TOOLS/.evergreen/csfle/azurekms/run-command.sh echo "Running test ... end" bash $HERE/teardown-encryption.sh diff --git a/.evergreen/run-gcpkms-test.sh b/.evergreen/run-gcpkms-test.sh index 7ccc74b453..4c953584b2 100755 --- a/.evergreen/run-gcpkms-test.sh +++ b/.evergreen/run-gcpkms-test.sh @@ -17,6 +17,6 @@ echo "Untarring file ... begin" GCPKMS_CMD="tar xf mongo-python-driver.tgz" $DRIVERS_TOOLS/.evergreen/csfle/gcpkms/run-command.sh echo "Untarring file ... end" echo "Running test ... begin" -GCPKMS_CMD="SUCCESS=true TEST_FLE_GCP_AUTO=1 ./.evergreen/hatch.sh test:test-eg" $DRIVERS_TOOLS/.evergreen/csfle/gcpkms/run-command.sh +GCPKMS_CMD="SUCCESS=true TEST_FLE_GCP_AUTO=1 ./.evergreen/just.sh test-eg" $DRIVERS_TOOLS/.evergreen/csfle/gcpkms/run-command.sh echo "Running test ... end" bash $HERE/teardown-encryption.sh diff --git a/.evergreen/run-import-time-test.sh b/.evergreen/run-import-time-test.sh index e9f6161bcc..95e3c93d25 100755 --- a/.evergreen/run-import-time-test.sh +++ b/.evergreen/run-import-time-test.sh @@ -5,7 +5,7 @@ set -x . 
.evergreen/utils.sh -if [ -z "$PYTHON_BINARY" ]; then +if [ -z "${PYTHON_BINARY:-}" ]; then PYTHON_BINARY=$(find_python3) fi diff --git a/.evergreen/run-mongodb-aws-ecs-test.sh b/.evergreen/run-mongodb-aws-ecs-test.sh index 3189a6cc6c..91777be226 100755 --- a/.evergreen/run-mongodb-aws-ecs-test.sh +++ b/.evergreen/run-mongodb-aws-ecs-test.sh @@ -31,4 +31,4 @@ export AUTH="auth" export SET_XTRACE_ON=1 cd src rm -rf .venv -bash .evergreen/hatch.sh test:test-eg +bash .evergreen/just.sh test-eg diff --git a/.evergreen/run-mongodb-oidc-test.sh b/.evergreen/run-mongodb-oidc-test.sh index 22864528c0..46c4f24969 100755 --- a/.evergreen/run-mongodb-oidc-test.sh +++ b/.evergreen/run-mongodb-oidc-test.sh @@ -32,4 +32,4 @@ fi export TEST_AUTH_OIDC=1 export COVERAGE=1 export AUTH="auth" -bash ./.evergreen/hatch.sh test:test-eg -- "${@:1}" +bash ./.evergreen/just.sh test-eg "${@:1}" diff --git a/.evergreen/run-perf-tests.sh b/.evergreen/run-perf-tests.sh index ff8d81a837..e6a51b3297 100755 --- a/.evergreen/run-perf-tests.sh +++ b/.evergreen/run-perf-tests.sh @@ -16,4 +16,4 @@ export OUTPUT_FILE="${PROJECT_DIRECTORY}/results.json" export PYTHON_BINARY=/opt/mongodbtoolchain/v4/bin/python3 export PERF_TEST=1 -bash ./.evergreen/hatch.sh test:test-eg +bash ./.evergreen/just.sh test-eg diff --git a/.evergreen/scripts/cleanup.sh b/.evergreen/scripts/cleanup.sh index 9e583e4f1e..a1fd92f04d 100755 --- a/.evergreen/scripts/cleanup.sh +++ b/.evergreen/scripts/cleanup.sh @@ -1,7 +1,7 @@ #!/bin/bash if [ -f "$DRIVERS_TOOLS"/.evergreen/csfle/secrets-export.sh ]; then - . .evergreen/hatch.sh encryption:teardown + bash .evergreen/teardown-encryption.sh fi rm -rf "${DRIVERS_TOOLS}" || true rm -f ./secrets-export.sh || true diff --git a/.evergreen/scripts/configure-env.sh b/.evergreen/scripts/configure-env.sh index e0c845a333..ebbffcf1db 100755 --- a/.evergreen/scripts/configure-env.sh +++ b/.evergreen/scripts/configure-env.sh @@ -14,12 +14,14 @@ fi PROJECT_DIRECTORY="$(pwd)" DRIVERS_TOOLS="$(dirname $PROJECT_DIRECTORY)/drivers-tools" CARGO_HOME=${CARGO_HOME:-${DRIVERS_TOOLS}/.cargo} +HATCH_CONFIG=$PROJECT_DIRECTORY/hatch_config.toml # Python has cygwin path problems on Windows. 
Detect prospective mongo-orchestration home directory if [ "Windows_NT" = "${OS:-}" ]; then # Magic variable in cygwin DRIVERS_TOOLS=$(cygpath -m $DRIVERS_TOOLS) PROJECT_DIRECTORY=$(cygpath -m $PROJECT_DIRECTORY) CARGO_HOME=$(cygpath -m $CARGO_HOME) + HATCH_CONFIG=$(cygpath -m "$HATCH_CONFIG") fi SCRIPT_DIR="$PROJECT_DIRECTORY/.evergreen/scripts" @@ -32,15 +34,16 @@ fi export MONGO_ORCHESTRATION_HOME="$DRIVERS_TOOLS/.evergreen/orchestration" export MONGODB_BINARIES="$DRIVERS_TOOLS/mongodb/bin" +export DRIVERS_TOOLS_BINARIES="$DRIVERS_TOOLS/.bin" cat < "$SCRIPT_DIR"/env.sh -set -o errexit export PROJECT_DIRECTORY="$PROJECT_DIRECTORY" export CURRENT_VERSION="$CURRENT_VERSION" export SKIP_LEGACY_SHELL=1 export DRIVERS_TOOLS="$DRIVERS_TOOLS" export MONGO_ORCHESTRATION_HOME="$MONGO_ORCHESTRATION_HOME" export MONGODB_BINARIES="$MONGODB_BINARIES" +export DRIVERS_TOOLS_BINARIES="$DRIVERS_TOOLS_BINARIES" export PROJECT_DIRECTORY="$PROJECT_DIRECTORY" export SETDEFAULTENCODING="${SETDEFAULTENCODING:-}" export SKIP_CSOT_TESTS="${SKIP_CSOT_TESTS:-}" @@ -59,7 +62,8 @@ export skip_ECS_auth_test="${skip_ECS_auth_test:-}" export CARGO_HOME="$CARGO_HOME" export TMPDIR="$MONGO_ORCHESTRATION_HOME/db" -export PATH="$MONGODB_BINARIES:$PATH" +export HATCH_CONFIG="$HATCH_CONFIG" +export PATH="$MONGODB_BINARIES:$DRIVERS_TOOLS_BINARIES:$PATH" # shellcheck disable=SC2154 export PROJECT="${project:-mongo-python-driver}" export PIP_QUIET=1 diff --git a/.evergreen/scripts/ensure-hatch.sh b/.evergreen/scripts/ensure-hatch.sh deleted file mode 100755 index e63d98bb6d..0000000000 --- a/.evergreen/scripts/ensure-hatch.sh +++ /dev/null @@ -1,59 +0,0 @@ -#!/bin/bash - -set -eu - -HERE=$(dirname ${BASH_SOURCE:-$0}) -pushd "$(dirname "$(dirname $HERE)")" > /dev/null - -# Ensure hatch is available. -if [ ! -x "$(command -v hatch)" ]; then - # Install a virtual env with "hatch" - # Ensure there is a python venv. - . .evergreen/utils.sh - - if [ -z "${PYTHON_BINARY:-}" ]; then - PYTHON_BINARY=$(find_python3) - fi - VENV_DIR=.venv - if [ ! -d $VENV_DIR ]; then - echo "Creating virtual environment..." - createvirtualenv "$PYTHON_BINARY" .venv - echo "Creating virtual environment... done." - fi - if [ -f $VENV_DIR/Scripts/activate ]; then - . $VENV_DIR/Scripts/activate - else - . $VENV_DIR/bin/activate - fi - - python --version - - echo "Installing hatch..." - python -m pip install -U pip - python -m pip install hatch || { - # Install rust and try again. - CARGO_HOME=${CARGO_HOME:-${DRIVERS_TOOLS}/.cargo} - # Handle paths on Windows. - if [ "Windows_NT" = "${OS:-}" ]; then # Magic variable in cygwin - CARGO_HOME=$(cygpath -m $CARGO_HOME) - fi - export RUSTUP_HOME="${CARGO_HOME}/.rustup" - ${DRIVERS_TOOLS}/.evergreen/install-rust.sh - source "${CARGO_HOME}/env" - python -m pip install hatch - } - # Ensure hatch does not write to user or global locations. - touch hatch_config.toml - HATCH_CONFIG=$(pwd)/hatch_config.toml - if [ "Windows_NT" = "${OS:-}" ]; then # Magic variable in cygwin - HATCH_CONFIG=$(cygpath -m "$HATCH_CONFIG") - fi - export HATCH_CONFIG - hatch config restore - hatch config set dirs.data "$(pwd)/.hatch/data" - hatch config set dirs.cache "$(pwd)/.hatch/cache" - - echo "Installing hatch... done." 
-fi -hatch --version -popd > /dev/null diff --git a/.evergreen/scripts/install-dependencies.sh b/.evergreen/scripts/install-dependencies.sh index ebcc8f3069..bbbfc745ec 100755 --- a/.evergreen/scripts/install-dependencies.sh +++ b/.evergreen/scripts/install-dependencies.sh @@ -1,6 +1,6 @@ #!/bin/bash -set -o xtrace +set -eu file="$PROJECT_DIRECTORY/.evergreen/install-dependencies.sh" # Don't use ${file} syntax here because evergreen treats it as an empty expansion. [ -f "$file" ] && bash "$file" || echo "$file not available, skipping" diff --git a/.evergreen/scripts/run-atlas-tests.sh b/.evergreen/scripts/run-atlas-tests.sh index 98a19f047f..30b8d5a615 100755 --- a/.evergreen/scripts/run-atlas-tests.sh +++ b/.evergreen/scripts/run-atlas-tests.sh @@ -4,4 +4,4 @@ set +x set -o errexit bash "${DRIVERS_TOOLS}"/.evergreen/auth_aws/setup_secrets.sh drivers/atlas_connect -TEST_ATLAS=1 bash "${PROJECT_DIRECTORY}"/.evergreen/hatch.sh test:test-eg +TEST_ATLAS=1 bash "${PROJECT_DIRECTORY}"/.evergreen/just.sh test-eg diff --git a/.evergreen/scripts/run-doctests.sh b/.evergreen/scripts/run-doctests.sh index f7215ad347..5950e2c107 100755 --- a/.evergreen/scripts/run-doctests.sh +++ b/.evergreen/scripts/run-doctests.sh @@ -1,4 +1,4 @@ #!/bin/bash set -o xtrace -PYTHON_BINARY=${PYTHON_BINARY} bash "${PROJECT_DIRECTORY}"/.evergreen/hatch.sh doctest:test +PYTHON_BINARY=${PYTHON_BINARY} bash "${PROJECT_DIRECTORY}"/.evergreen/just.sh docs-test diff --git a/.evergreen/scripts/run-enterprise-auth-tests.sh b/.evergreen/scripts/run-enterprise-auth-tests.sh index 7f936b1955..e015a34ca4 100755 --- a/.evergreen/scripts/run-enterprise-auth-tests.sh +++ b/.evergreen/scripts/run-enterprise-auth-tests.sh @@ -5,4 +5,4 @@ set -eu set +x # Use the default python to bootstrap secrets. 
bash "${DRIVERS_TOOLS}"/.evergreen/secrets_handling/setup-secrets.sh drivers/enterprise_auth -TEST_ENTERPRISE_AUTH=1 AUTH=auth bash "${PROJECT_DIRECTORY}"/.evergreen/hatch.sh test:test-eg +TEST_ENTERPRISE_AUTH=1 AUTH=auth bash "${PROJECT_DIRECTORY}"/.evergreen/just.sh test-eg diff --git a/.evergreen/scripts/run-gcpkms-fail-test.sh b/.evergreen/scripts/run-gcpkms-fail-test.sh index dd9d522c8a..594a2984fa 100755 --- a/.evergreen/scripts/run-gcpkms-fail-test.sh +++ b/.evergreen/scripts/run-gcpkms-fail-test.sh @@ -4,4 +4,4 @@ export PYTHON_BINARY=/opt/mongodbtoolchain/v4/bin/python3 export LIBMONGOCRYPT_URL=https://s3.amazonaws.com/mciuploads/libmongocrypt/debian11/master/latest/libmongocrypt.tar.gz SKIP_SERVERS=1 bash ./.evergreen/setup-encryption.sh -SUCCESS=false TEST_FLE_GCP_AUTO=1 ./.evergreen/hatch.sh test:test-eg +SUCCESS=false TEST_FLE_GCP_AUTO=1 ./.evergreen/just.sh test-eg diff --git a/.evergreen/scripts/run-mockupdb-tests.sh b/.evergreen/scripts/run-mockupdb-tests.sh index 8825a0237d..32594f05d3 100755 --- a/.evergreen/scripts/run-mockupdb-tests.sh +++ b/.evergreen/scripts/run-mockupdb-tests.sh @@ -2,4 +2,4 @@ set -o xtrace export PYTHON_BINARY=${PYTHON_BINARY} -bash "${PROJECT_DIRECTORY}"/.evergreen/hatch.sh test:test-mockupdb +bash "${PROJECT_DIRECTORY}"/.evergreen/just.sh test-mockupdb diff --git a/.evergreen/scripts/run-mongodb-aws-test.sh b/.evergreen/scripts/run-mongodb-aws-test.sh index ec20bfd06b..88c3236b3f 100755 --- a/.evergreen/scripts/run-mongodb-aws-test.sh +++ b/.evergreen/scripts/run-mongodb-aws-test.sh @@ -30,4 +30,4 @@ set -x export TEST_AUTH_AWS=1 export AUTH="auth" export SET_XTRACE_ON=1 -bash ./.evergreen/hatch.sh test:test-eg +bash ./.evergreen/just.sh test-eg diff --git a/.evergreen/scripts/run-ocsp-test.sh b/.evergreen/scripts/run-ocsp-test.sh index 3c6d3b2b3b..328bd2f203 100755 --- a/.evergreen/scripts/run-ocsp-test.sh +++ b/.evergreen/scripts/run-ocsp-test.sh @@ -4,5 +4,5 @@ TEST_OCSP=1 \ PYTHON_BINARY="${PYTHON_BINARY}" \ CA_FILE="${DRIVERS_TOOLS}/.evergreen/ocsp/${OCSP_ALGORITHM}/ca.pem" \ OCSP_TLS_SHOULD_SUCCEED="${OCSP_TLS_SHOULD_SUCCEED}" \ -bash "${PROJECT_DIRECTORY}"/.evergreen/hatch.sh test:test-eg +bash "${PROJECT_DIRECTORY}"/.evergreen/just.sh test-eg bash "${DRIVERS_TOOLS}"/.evergreen/ocsp/teardown.sh diff --git a/.evergreen/scripts/run-tests.sh b/.evergreen/scripts/run-tests.sh index 6986a0bbee..ea923b3f5e 100755 --- a/.evergreen/scripts/run-tests.sh +++ b/.evergreen/scripts/run-tests.sh @@ -51,4 +51,4 @@ GREEN_FRAMEWORK=${GREEN_FRAMEWORK} \ TEST_DATA_LAKE=${TEST_DATA_LAKE:-} \ TEST_SUITES=${TEST_SUITES:-} \ MONGODB_API_VERSION=${MONGODB_API_VERSION} \ - bash "${PROJECT_DIRECTORY}"/.evergreen/hatch.sh test:test-eg + bash "${PROJECT_DIRECTORY}"/.evergreen/just.sh test-eg diff --git a/.evergreen/scripts/setup-dev-env.sh b/.evergreen/scripts/setup-dev-env.sh new file mode 100755 index 0000000000..7042871942 --- /dev/null +++ b/.evergreen/scripts/setup-dev-env.sh @@ -0,0 +1,72 @@ +#!/bin/bash + +set -eu + +HERE=$(dirname ${BASH_SOURCE:-$0}) +pushd "$(dirname "$(dirname $HERE)")" > /dev/null + +# Source the env file to pick up common variables. +if [ -f $HERE/scripts/env.sh ]; then + source $HERE/scripts/env.sh +fi + +# Set the location of the python bin dir. +if [ "Windows_NT" = "${OS:-}" ]; then + BIN_DIR=.venv/Scripts +else + BIN_DIR=.venv/bin +fi + +# Ensure there is a python venv. +if [ ! -d $BIN_DIR ]; then + . 
.evergreen/utils.sh + + if [ -z "${PYTHON_BINARY:-}" ]; then + PYTHON_BINARY=$(find_python3) + fi + + echo "Creating virtual environment..." + createvirtualenv "$PYTHON_BINARY" .venv + echo "Creating virtual environment... done." +fi + +# Activate the virtual env. +. $BIN_DIR/activate + +# Ensure there is a local hatch. +if [ ! -f $BIN_DIR/hatch ]; then + echo "Installing hatch..." + python -m pip install hatch || { + # CARGO_HOME is defined in configure-env.sh + export CARGO_HOME=${CARGO_HOME:-$HOME/.cargo/} + export RUSTUP_HOME="${CARGO_HOME}/.rustup" + ${DRIVERS_TOOLS}/.evergreen/install-rust.sh + source "${CARGO_HOME}/env" + python -m pip install hatch + } + echo "Installing hatch... done." +fi + +# Ensure hatch does not write to user or global locations. +HATCH_CONFIG=${HATCH_CONFIG:-hatch_config.toml} +if [ ! -f ${HATCH_CONFIG} ]; then + touch hatch_config.toml + hatch config restore + hatch config set dirs.data "$(pwd)/.hatch/data" + hatch config set dirs.cache "$(pwd)/.hatch/cache" +fi + +# Ensure there is a local pre-commit if there is a git checkout. +if [ -d .git ]; then + if [ ! -f $BIN_DIR/pre-commit ]; then + python -m pip install pre-commit + fi + + # Ensure the pre-commit hook is installed. + if [ ! -f .git/hooks/pre-commit ]; then + pre-commit install + fi +fi + +# Install pymongo and its test deps. +python -m pip install ".[test]" diff --git a/.evergreen/scripts/setup-encryption.sh b/.evergreen/scripts/setup-encryption.sh index 2f167cd20b..5b73240205 100755 --- a/.evergreen/scripts/setup-encryption.sh +++ b/.evergreen/scripts/setup-encryption.sh @@ -1,5 +1,5 @@ #!/bin/bash if [ -n "${test_encryption}" ]; then - ./.evergreen/hatch.sh encryption:setup + bash .evergreen/setup-encryption.sh fi diff --git a/.evergreen/setup-spawn-host.sh b/.evergreen/setup-spawn-host.sh index 4109e59183..c20e1c756e 100755 --- a/.evergreen/setup-spawn-host.sh +++ b/.evergreen/setup-spawn-host.sh @@ -16,4 +16,4 @@ rsync -az -e ssh --exclude '.git' --filter=':- .gitignore' -r . $target:$remote_ echo "Copying files to $target... 
done" ssh $target $remote_dir/.evergreen/scripts/setup-system.sh -ssh $target "PYTHON_BINARY=${PYTHON_BINARY:-} $remote_dir/.evergreen/scripts/ensure-hatch.sh" +ssh $target "cd $remote_dir && PYTHON_BINARY=${PYTHON_BINARY:-} just install" diff --git a/.evergreen/utils.sh b/.evergreen/utils.sh index d3af2dcc7a..e044b3d766 100755 --- a/.evergreen/utils.sh +++ b/.evergreen/utils.sh @@ -1,6 +1,6 @@ -#!/bin/bash -ex +#!/bin/bash -set -o xtrace +set -eu find_python3() { PYTHON="" diff --git a/.github/workflows/test-python.yml b/.github/workflows/test-python.yml index 2310b7698d..a41daaabb1 100644 --- a/.github/workflows/test-python.yml +++ b/.github/workflows/test-python.yml @@ -27,12 +27,14 @@ jobs: python-version: "3.9" cache: 'pip' cache-dependency-path: 'pyproject.toml' + - name: Install just + uses: extractions/setup-just@v2 - name: Install Python dependencies run: | - python -m pip install -U pip hatch + just install - name: Run linters run: | - hatch run lint:run-manual + just lint-manual - name: Run compilation run: | export PYMONGO_C_EXT_MUST_BUILD=1 @@ -40,7 +42,7 @@ jobs: python tools/fail_if_no_c.py - name: Run typecheck run: | - hatch run typing:check + just typing - run: | sudo apt-get install -y cppcheck - run: | @@ -73,18 +75,16 @@ jobs: cache: 'pip' cache-dependency-path: 'pyproject.toml' allow-prereleases: true + - name: Install just + uses: extractions/setup-just@v2 - name: Install dependencies run: | - pip install -U pip - if [[ "${{ matrix.python-version }}" == "3.13" ]]; then - pip install --pre cffi setuptools - pip install --no-build-isolation hatch - elif [[ "${{ matrix.python-version }}" == "3.13t" ]]; then - # Hatch can't be installed on 3.13t, use pytest directly. + if [[ "${{ matrix.python-version }}" == "3.13t" ]]; then + # Just can't be installed on 3.13t, use pytest directly. pip install . 
pip install -r requirements/test.txt else - pip install hatch + just install fi - name: Start MongoDB uses: supercharge/mongodb-github-action@1.12.0 @@ -95,7 +95,7 @@ jobs: if [[ "${{ matrix.python-version }}" == "3.13t" ]]; then pytest -v --durations=5 --maxfail=10 else - hatch run test:test + just test fi doctest: @@ -111,16 +111,18 @@ jobs: python-version: "3.9" cache: 'pip' cache-dependency-path: 'pyproject.toml' - - name: Install dependencies - run: | - pip install -U hatch pip + - name: Install just + uses: extractions/setup-just@v2 - name: Start MongoDB uses: supercharge/mongodb-github-action@1.12.0 with: mongodb-version: '8.0.0-rc4' + - name: Install dependencies + run: | + just install - name: Run tests run: | - hatch run doctest:test + just docs-test docs: name: Docs Checks @@ -135,12 +137,14 @@ jobs: cache-dependency-path: 'pyproject.toml' # Build docs on lowest supported Python for furo python-version: '3.9' + - name: Install just + uses: extractions/setup-just@v2 - name: Install dependencies run: | - pip install -U pip hatch + just install - name: Build docs run: | - hatch run doc:build + just docs linkcheck: name: Link Check @@ -155,12 +159,14 @@ jobs: cache-dependency-path: 'pyproject.toml' # Build docs on lowest supported Python for furo python-version: '3.9' + - name: Install just + uses: extractions/setup-just@v2 - name: Install dependencies run: | - pip install -U pip hatch + just install - name: Build docs run: | - hatch run doc:linkcheck + just docs-linkcheck typing: name: Typing Tests @@ -177,12 +183,14 @@ jobs: python-version: "${{matrix.python}}" cache: 'pip' cache-dependency-path: 'pyproject.toml' + - name: Install just + uses: extractions/setup-just@v2 - name: Install dependencies run: | - pip install -U pip hatch + just install - name: Run typecheck run: | - hatch run typing:check + just typing make_sdist: runs-on: ubuntu-latest diff --git a/.gitignore b/.gitignore index e4587125e8..01f896d316 100644 --- a/.gitignore +++ b/.gitignore @@ -25,6 +25,8 @@ libmongocrypt/ libmongocrypt_git/ hatch_config.toml .venv +expansion.yml +.evergreen/scripts/env.sh # Lambda temp files test/lambda/.aws-sam diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 814e040048..5a46151760 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -28,9 +28,10 @@ including 4 space indents and 79 character line limits. - Avoid backward breaking changes if at all possible. - Write inline documentation for new classes and methods. -- We use [hatch](https://hatch.pypa.io/dev/) for our script runner and packaging tool. +- We use [hatch](https://hatch.pypa.io/dev/) for python environment management and packaging. +- We use [just](https://just.systems/man/en/) as our task runner. - Write tests and make sure they pass (make sure you have a mongod - running on the default port, then execute `hatch run test:test` from the cmd + running on the default port, then execute `just test` from the cmd line to run the test suite). - Add yourself to doc/contributors.rst `:)` @@ -148,17 +149,18 @@ To run `pre-commit` manually, run: pre-commit run --all-files ``` -To run a manual hook like `mypy` manually, run: +To run a manual hook like `ruff` manually, run: ```bash -pre-commit run --all-files --hook-stage manual mypy +pre-commit run --all-files --hook-stage manual ruff ``` -Typically we use `hatch` to run the linters, e.g. +Typically we use `just` to run the linters, e.g. 
```bash -hatch run typing:check-mypy -hatch run lint:build-manual +just install # this will install a venv with pre-commit installed, and install the pre-commit hook. +just typing-mypy +just run lint-manual ``` ## Documentation @@ -176,13 +178,13 @@ documentation including narrative docs, and the [Sphinx docstring format](https: You can build the documentation locally by running: ```bash -hatch run doc:build +just docs-build ``` When updating docs, it can be helpful to run the live docs server as: ```bash -hatch run doc:serve +just docs-serve ``` Browse to the link provided, and then as you make changes to docstrings or narrative docs, @@ -192,13 +194,14 @@ the pages will re-render and the browser will automatically refresh. ## Running Tests Locally - Ensure you have started the appropriate Mongo Server(s). -- Run `pip install hatch` to use `hatch` for testing or run - `pip install -e ".[test]"` to run `pytest` directly. -- Run `hatch run test:test` or `pytest` to run all of the tests. +- Run `just install` to set up `hatch` in a local virtual environment, or you can manually + create a virtual environment and run `pytest` directly. If you want to use a specific + version of Python, remove the `.venv` folder and set `PYTHON_BINARY` before running `just install`. +- Run `just test` or `pytest` to run all of the tests. - Append `test/.py::::` to run specific tests. You can omit the `` to test a full class and the `` to test a full module. For example: - `hatch run test:test -- test/test_change_stream.py::TestUnifiedChangeStreamsErrors::test_change_stream_errors_on_ElectionInProgress`. + `just test test/test_change_stream.py::TestUnifiedChangeStreamsErrors::test_change_stream_errors_on_ElectionInProgress`. - Use the `-k` argument to select tests by pattern. ## Running Load Balancer Tests Locally @@ -211,15 +214,15 @@ the pages will re-render and the browser will automatically refresh. - Start the load balancer using: `MONGODB_URI='mongodb://localhost:27017,localhost:27018/' $PWD/drivers-evergreen-tools/.evergreen/run-load-balancer.sh start`. - Run the tests from the `pymongo` checkout directory using: - `TEST_LOADBALANCER=1 hatch run test:test-eg`. + `TEST_LOADBALANCER=1 just test-eg`. ## Running Encryption Tests Locally - Clone `drivers-evergreen-tools`: `git clone git@github.com:mongodb-labs/drivers-evergreen-tools.git`. - Run `export DRIVERS_TOOLS=$PWD/drivers-evergreen-tools` -- Run `AWS_PROFILE= hatch run encryption:setup` after setting up your AWS profile with `aws configure sso`. -- Run the tests with `TEST_ENCRYPTION=1 hatch run test:test-eg`. -- When done, run `hatch run encryption:teardown` to clean up. +- Run `AWS_PROFILE= just setup-encryption` after setting up your AWS profile with `aws configure sso`. +- Run the tests with `TEST_ENCRYPTION=1 just test-eg`. +- When done, run `just teardown-encryption` to clean up. 
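Taken together, a typical local encryption-test session might look like the following sketch, built only from the commands listed above (`<profile>` stays a placeholder for your own AWS SSO profile name):

```bash
# Sketch of the workflow described above, assuming it is run from the repo root
# and that the AWS profile was already configured with `aws configure sso`.
git clone git@github.com:mongodb-labs/drivers-evergreen-tools.git
export DRIVERS_TOOLS=$PWD/drivers-evergreen-tools
AWS_PROFILE=<profile> just setup-encryption
TEST_ENCRYPTION=1 just test-eg
just teardown-encryption   # clean up when done
```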
## Re-sync Spec Tests diff --git a/hatch.toml b/hatch.toml index 60bd0af014..15d0f25f07 100644 --- a/hatch.toml +++ b/hatch.toml @@ -30,13 +30,6 @@ check-strict-pyright = [ ] check = ["check-mypy", "check-pyright", "check-strict-pyright"] -[envs.lint] -skip-install = true -dependencies = ["pre-commit"] -[envs.lint.scripts] -run = "pre-commit run --all-files" -run-manual = "pre-commit run --all-files --hook-stage manual" - [envs.test] features = ["test"] [envs.test.scripts] @@ -44,9 +37,3 @@ test = "pytest -v --durations=5 --maxfail=10 {args}" test-eg = "bash ./.evergreen/run-tests.sh {args}" test-async = "pytest -v --durations=5 --maxfail=10 -m default_async {args}" test-mockupdb = ["pip install -U git+https://github.com/mongodb-labs/mongo-mockup-db@master", "test -m mockupdb"] - -[envs.encryption] -skip-install = true -[envs.encryption.scripts] -setup = "bash .evergreen/setup-encryption.sh" -teardown = "bash .evergreen/teardown-encryption.sh" diff --git a/justfile b/justfile new file mode 100644 index 0000000000..23f0993c6b --- /dev/null +++ b/justfile @@ -0,0 +1,69 @@ +# See https://just.systems/man/en/ for instructions +set shell := ["bash", "-c"] +set dotenv-load +set dotenv-filename := "./.evergreen/scripts/env.sh" + +# Handle cross-platform paths to local python cli tools. +python_bin_dir := if os_family() == "windows" { "./.venv/Scripts" } else { "./.venv/bin" } +hatch_bin := python_bin_dir + "/hatch" +pre_commit_bin := python_bin_dir + "/pre-commit" + +# Make the default recipe private so it doesn't show up in the list. +[private] +default: + @just --list + +install: + bash .evergreen/scripts/setup-dev-env.sh + +[group('docs')] +docs: + {{hatch_bin}} run doc:build + +[group('docs')] +docs-serve: + {{hatch_bin}} run doc:serve + +[group('docs')] +docs-linkcheck: + {{hatch_bin}} run doc:linkcheck + +[group('docs')] +docs-test: + {{hatch_bin}} run doctest:test + +[group('typing')] +typing: + {{hatch_bin}} run typing:check + +[group('typing')] +typing-mypy: + {{hatch_bin}} run typing:mypy + +[group('lint')] +lint: + {{pre_commit_bin}} run --all-files + +[group('lint')] +lint-manual: + {{pre_commit_bin}} run --all-files --hook-stage manual + +[group('test')] +test *args: + {{hatch_bin}} run test:test {{args}} + +[group('test')] +test-mockupdb: + {{hatch_bin}} run test:test-mockupdb + +[group('test')] +test-eg *args: + {{hatch_bin}} run test:test-eg {{args}} + +[group('encryption')] +setup-encryption: + bash .evergreen/setup-encryption.sh + +[group('encryption')] +teardown-encryption: + bash .evergreen/teardown-encryption.sh From 14bc1f6be2126addb1cf4028e4654a0c568bfedd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 21 Jan 2025 06:29:11 -0600 Subject: [PATCH 159/182] Bump pyright from 1.1.391 to 1.1.392.post0 (#2067) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/typing.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/typing.txt b/requirements/typing.txt index 5a2f76f6bc..b0f0c9c7fc 100644 --- a/requirements/typing.txt +++ b/requirements/typing.txt @@ -1,5 +1,5 @@ mypy==1.14.1 -pyright==1.1.391 +pyright==1.1.392.post0 typing_extensions -r ./encryption.txt -r ./ocsp.txt From 85877a0802bc3a3ba40113aa45a8a13e2e5f86c9 Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Tue, 21 Jan 2025 10:26:21 -0500 Subject: [PATCH 160/182] PYTHON-5048 - Synchro script should correctly process all files (#2069) --- tools/synchro.py 
| 49 +++++++++++++++++++++++++++--------------------- 1 file changed, 28 insertions(+), 21 deletions(-) diff --git a/tools/synchro.py b/tools/synchro.py index 577e82d14e..dbcbbd1351 100644 --- a/tools/synchro.py +++ b/tools/synchro.py @@ -149,6 +149,10 @@ _gridfs_dest_base = "./gridfs/synchronous/" _test_dest_base = "./test/" +if not Path.exists(Path(_pymongo_dest_base)): + Path.mkdir(Path(_pymongo_dest_base)) +if not Path.exists(Path(_gridfs_dest_base)): + Path.mkdir(Path(_gridfs_dest_base)) async_files = [ _pymongo_base + f for f in listdir(_pymongo_base) if (Path(_pymongo_base) / f).is_file() @@ -170,18 +174,6 @@ def async_only_test(f: str) -> bool: if (Path(_test_base) / f).is_file() and not async_only_test(f) ] -sync_files = [ - _pymongo_dest_base + f - for f in listdir(_pymongo_dest_base) - if (Path(_pymongo_dest_base) / f).is_file() -] - -sync_gridfs_files = [ - _gridfs_dest_base + f - for f in listdir(_gridfs_dest_base) - if (Path(_gridfs_dest_base) / f).is_file() -] - # Add each asynchronized test here as part of the converting PR converted_tests = [ "__init__.py", @@ -223,15 +215,10 @@ def async_only_test(f: str) -> bool: "unified_format.py", ] -sync_test_files = [ - _test_dest_base + f for f in converted_tests if (Path(_test_dest_base) / f).is_file() -] - - -docstring_translate_files = sync_files + sync_gridfs_files + sync_test_files - -def process_files(files: list[str]) -> None: +def process_files( + files: list[str], docstring_translate_files: list[str], sync_test_files: list[str] +) -> None: for file in files: if "__init__" not in file or "__init__" and "test" in file: with open(file, "r+") as f: @@ -374,7 +361,27 @@ def main() -> None: unasync_directory(async_files, _pymongo_base, _pymongo_dest_base, replacements) unasync_directory(gridfs_files, _gridfs_base, _gridfs_dest_base, replacements) unasync_directory(test_files, _test_base, _test_dest_base, replacements) - process_files(sync_files + sync_gridfs_files + sync_test_files) + + sync_files = [ + _pymongo_dest_base + f + for f in listdir(_pymongo_dest_base) + if (Path(_pymongo_dest_base) / f).is_file() + ] + + sync_gridfs_files = [ + _gridfs_dest_base + f + for f in listdir(_gridfs_dest_base) + if (Path(_gridfs_dest_base) / f).is_file() + ] + sync_test_files = [ + _test_dest_base + f for f in converted_tests if (Path(_test_dest_base) / f).is_file() + ] + + docstring_translate_files = sync_files + sync_gridfs_files + sync_test_files + + process_files( + sync_files + sync_gridfs_files + sync_test_files, docstring_translate_files, sync_test_files + ) if __name__ == "__main__": From 2ff2fde9111c4c9d2165af4cec358791105b1bda Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Tue, 21 Jan 2025 12:38:02 -0600 Subject: [PATCH 161/182] PYTHON-5049 Drop support for PyPy 3.9 (#2070) --- .evergreen/generated_configs/variants.yml | 64 +++++------------------ .evergreen/scripts/generate_config.py | 2 +- .github/workflows/test-python.yml | 2 +- README.md | 2 +- doc/changelog.rst | 4 +- doc/faq.rst | 2 +- doc/installation.rst | 2 +- doc/python3.rst | 2 +- test/asynchronous/test_client.py | 5 +- test/asynchronous/test_collection.py | 8 +-- test/asynchronous/test_database.py | 5 +- test/test_client.py | 5 +- test/test_collection.py | 8 +-- test/test_database.py | 5 +- test/test_errors.py | 12 ++--- 15 files changed, 29 insertions(+), 99 deletions(-) diff --git a/.evergreen/generated_configs/variants.yml b/.evergreen/generated_configs/variants.yml index b1db61d492..79c9b22c93 100644 --- a/.evergreen/generated_configs/variants.yml +++ 
b/.evergreen/generated_configs/variants.yml @@ -256,15 +256,15 @@ buildvariants: expansions: COMPRESSORS: zstd PYTHON_BINARY: /opt/python/3.9/bin/python3 - - name: compression-snappy-rhel8-pypy3.9 + - name: compression-snappy-rhel8-pypy3.10 tasks: - name: .standalone .noauth .nossl .sync_async - display_name: Compression snappy RHEL8 PyPy3.9 + display_name: Compression snappy RHEL8 PyPy3.10 run_on: - rhel87-small expansions: COMPRESSORS: snappy - PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 + PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 - name: compression-zlib-rhel8-pypy3.10 tasks: - name: .standalone .noauth .nossl .sync_async @@ -274,15 +274,15 @@ buildvariants: expansions: COMPRESSORS: zlib PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 - - name: compression-zstd-rhel8-pypy3.9 + - name: compression-zstd-rhel8-pypy3.10 tasks: - name: .standalone .noauth .nossl .sync_async !.4.0 - display_name: Compression zstd RHEL8 PyPy3.9 + display_name: Compression zstd RHEL8 PyPy3.10 run_on: - rhel87-small expansions: COMPRESSORS: zstd - PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 + PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 # Disable test commands tests - name: disable-test-commands-rhel8-python3.9 @@ -460,15 +460,6 @@ buildvariants: test_encryption: "true" test_encryption_pyopenssl: "true" PYTHON_BINARY: /opt/python/3.12/bin/python3 - - name: encryption-rhel8-pypy3.9 - tasks: - - name: .sharded_cluster .auth .ssl .sync_async - display_name: Encryption RHEL8 PyPy3.9 - run_on: - - rhel87-small - expansions: - test_encryption: "true" - PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 - name: encryption-macos-python3.9 tasks: - name: .latest .replica_set .sync_async @@ -608,15 +599,6 @@ buildvariants: expansions: AUTH: auth PYTHON_BINARY: C:/python/Python313/python.exe - - name: auth-enterprise-rhel8-pypy3.9-auth - tasks: - - name: test-enterprise-auth - display_name: Auth Enterprise RHEL8 PyPy3.9 Auth - run_on: - - rhel87-small - expansions: - AUTH: auth - PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 - name: auth-enterprise-rhel8-pypy3.10-auth tasks: - name: test-enterprise-auth @@ -900,10 +882,10 @@ buildvariants: TOPOLOGY: server VERSION: "8.0" PYTHON_BINARY: /opt/python/3.13/bin/python3 - - name: ocsp-rhel8-rapid-pypy3.9 + - name: ocsp-rhel8-rapid-pypy3.10 tasks: - name: .ocsp - display_name: OCSP RHEL8 rapid PyPy3.9 + display_name: OCSP RHEL8 rapid PyPy3.10 run_on: - rhel87-small batchtime: 20160 @@ -912,11 +894,11 @@ buildvariants: SSL: ssl TOPOLOGY: server VERSION: rapid - PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 - - name: ocsp-rhel8-latest-pypy3.10 + PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 + - name: ocsp-rhel8-latest-python3.9 tasks: - name: .ocsp - display_name: OCSP RHEL8 latest PyPy3.10 + display_name: OCSP RHEL8 latest Python3.9 run_on: - rhel87-small batchtime: 20160 @@ -925,7 +907,7 @@ buildvariants: SSL: ssl TOPOLOGY: server VERSION: latest - PYTHON_BINARY: /opt/python/pypy3.10/bin/python3 + PYTHON_BINARY: /opt/python/3.9/bin/python3 - name: ocsp-win64-v4.4-python3.9 tasks: - name: .ocsp-rsa !.ocsp-staple @@ -1061,17 +1043,6 @@ buildvariants: expansions: test_pyopenssl: "true" PYTHON_BINARY: C:/python/Python313/python.exe - - name: pyopenssl-rhel8-pypy3.9 - tasks: - - name: .replica_set .auth .ssl .sync_async - - name: .7.0 .auth .ssl .sync_async - display_name: PyOpenSSL RHEL8 PyPy3.9 - run_on: - - rhel87-small - batchtime: 10080 - expansions: - test_pyopenssl: "true" - PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 - name: pyopenssl-rhel8-pypy3.10 tasks: - 
name: .replica_set .auth .ssl .sync_async @@ -1164,17 +1135,6 @@ buildvariants: expansions: COVERAGE: coverage PYTHON_BINARY: /opt/python/3.12/bin/python3 - - name: test-rhel8-pypy3.9 - tasks: - - name: .sharded_cluster .auth .ssl .sync_async - - name: .replica_set .noauth .ssl .sync_async - - name: .standalone .noauth .nossl .sync_async - display_name: "* Test RHEL8 PyPy3.9" - run_on: - - rhel87-small - expansions: - COVERAGE: coverage - PYTHON_BINARY: /opt/python/pypy3.9/bin/python3 - name: test-macos-python3.9 tasks: - name: .sharded_cluster .auth .ssl !.sync_async diff --git a/.evergreen/scripts/generate_config.py b/.evergreen/scripts/generate_config.py index e8d0b171bd..2917e882d8 100644 --- a/.evergreen/scripts/generate_config.py +++ b/.evergreen/scripts/generate_config.py @@ -28,7 +28,7 @@ ALL_VERSIONS = ["4.0", "4.4", "5.0", "6.0", "7.0", "8.0", "rapid", "latest"] CPYTHONS = ["3.9", "3.10", "3.11", "3.12", "3.13"] -PYPYS = ["pypy3.9", "pypy3.10"] +PYPYS = ["pypy3.10"] ALL_PYTHONS = CPYTHONS + PYPYS MIN_MAX_PYTHON = [CPYTHONS[0], CPYTHONS[-1]] BATCHTIME_WEEK = 10080 diff --git a/.github/workflows/test-python.yml b/.github/workflows/test-python.yml index a41daaabb1..3760e308a5 100644 --- a/.github/workflows/test-python.yml +++ b/.github/workflows/test-python.yml @@ -55,7 +55,7 @@ jobs: strategy: matrix: os: [ubuntu-20.04] - python-version: ["3.9", "pypy-3.9", "3.13", "3.13t"] + python-version: ["3.9", "pypy-3.10", "3.13", "3.13t"] name: CPython ${{ matrix.python-version }}-${{ matrix.os }} steps: - uses: actions/checkout@v4 diff --git a/README.md b/README.md index bd0755620e..b8e0078101 100644 --- a/README.md +++ b/README.md @@ -90,7 +90,7 @@ package that is incompatible with PyMongo. ## Dependencies -PyMongo supports CPython 3.9+ and PyPy3.9+. +PyMongo supports CPython 3.9+ and PyPy3.10+. Required dependencies: diff --git a/doc/changelog.rst b/doc/changelog.rst index fba6713bd9..4942d85de8 100644 --- a/doc/changelog.rst +++ b/doc/changelog.rst @@ -4,7 +4,7 @@ Changelog Changes in Version 4.11.0 (YYYY/MM/DD) -------------------------------------- -.. warning:: PyMongo 4.11 drops support for Python 3.8: Python 3.9+ or PyPy 3.9+ is now required. +.. warning:: PyMongo 4.11 drops support for Python 3.8 and PyPy 3.9: Python 3.9+ or PyPy 3.10+ is now required. .. warning:: PyMongo 4.11 drops support for MongoDB 3.6. PyMongo now supports MongoDB 4.0+. Driver support for MongoDB 3.6 reached end of life in April 2024. .. warning:: Driver support for MongoDB 4.0 reaches end of life in April 2025. @@ -14,7 +14,7 @@ Changes in Version 4.11.0 (YYYY/MM/DD) PyMongo 4.11 brings a number of changes including: -- Dropped support for Python 3.8. +- Dropped support for Python 3.8 and PyPy 3.9. - Dropped support for MongoDB 3.6. - Dropped support for the MONGODB-CR authenticate mechanism, which is no longer supported by MongoDB 4.0+. - pymongocrypt>=1.12 is now required for :ref:`In-Use Encryption` support. diff --git a/doc/faq.rst b/doc/faq.rst index 15950e7716..73d0ec8966 100644 --- a/doc/faq.rst +++ b/doc/faq.rst @@ -166,7 +166,7 @@ they are returned to the pool. Does PyMongo support Python 3? ------------------------------ -PyMongo supports CPython 3.9+ and PyPy3.9+. See the :doc:`python3` for details. +PyMongo supports CPython 3.9+ and PyPy3.10+. See the :doc:`python3` for details. Does PyMongo support asynchronous frameworks like Gevent, asyncio, Tornado, or Twisted? 
--------------------------------------------------------------------------------------- diff --git a/doc/installation.rst b/doc/installation.rst index f21a3792ad..abda06db16 100644 --- a/doc/installation.rst +++ b/doc/installation.rst @@ -28,7 +28,7 @@ To upgrade using pip:: Dependencies ------------ -PyMongo supports CPython 3.9+ and PyPy3.9+. +PyMongo supports CPython 3.9+ and PyPy3.10+. Required dependencies ..................... diff --git a/doc/python3.rst b/doc/python3.rst index 1ea43b3ccb..0a63f968a5 100644 --- a/doc/python3.rst +++ b/doc/python3.rst @@ -4,7 +4,7 @@ Python 3 FAQ What Python 3 versions are supported? ------------------------------------- -PyMongo supports CPython 3.9+ and PyPy3.9+. +PyMongo supports CPython 3.9+ and PyPy3.10+. Are there any PyMongo behavior changes with Python 3? ----------------------------------------------------- diff --git a/test/asynchronous/test_client.py b/test/asynchronous/test_client.py index db232386ee..744a170be2 100644 --- a/test/asynchronous/test_client.py +++ b/test/asynchronous/test_client.py @@ -237,10 +237,7 @@ def test_getattr(self): def test_iteration(self): client = self.client - if "PyPy" in sys.version and sys.version_info < (3, 8, 15): - msg = "'NoneType' object is not callable" - else: - msg = "'AsyncMongoClient' object is not iterable" + msg = "'AsyncMongoClient' object is not iterable" # Iteration fails with self.assertRaisesRegex(TypeError, msg): for _ in client: # type: ignore[misc] # error: "None" not callable [misc] diff --git a/test/asynchronous/test_collection.py b/test/asynchronous/test_collection.py index 528919f63c..beb58012a8 100644 --- a/test/asynchronous/test_collection.py +++ b/test/asynchronous/test_collection.py @@ -133,13 +133,7 @@ def test_getattr(self): def test_iteration(self): coll = self.db.coll - if "PyPy" in sys.version and sys.version_info < (3, 8, 15): - msg = "'NoneType' object is not callable" - else: - if _IS_SYNC: - msg = "'Collection' object is not iterable" - else: - msg = "'AsyncCollection' object is not iterable" + msg = "'AsyncCollection' object is not iterable" # Iteration fails with self.assertRaisesRegex(TypeError, msg): for _ in coll: # type: ignore[misc] # error: "None" not callable [misc] diff --git a/test/asynchronous/test_database.py b/test/asynchronous/test_database.py index b5a5960420..55a8cc3ab2 100644 --- a/test/asynchronous/test_database.py +++ b/test/asynchronous/test_database.py @@ -103,10 +103,7 @@ def test_getattr(self): def test_iteration(self): db = self.client.pymongo_test - if "PyPy" in sys.version and sys.version_info < (3, 8, 15): - msg = "'NoneType' object is not callable" - else: - msg = "'AsyncDatabase' object is not iterable" + msg = "'AsyncDatabase' object is not iterable" # Iteration fails with self.assertRaisesRegex(TypeError, msg): for _ in db: # type: ignore[misc] # error: "None" not callable [misc] diff --git a/test/test_client.py b/test/test_client.py index 5ec425f312..2a33077f5f 100644 --- a/test/test_client.py +++ b/test/test_client.py @@ -234,10 +234,7 @@ def test_getattr(self): def test_iteration(self): client = self.client - if "PyPy" in sys.version and sys.version_info < (3, 8, 15): - msg = "'NoneType' object is not callable" - else: - msg = "'MongoClient' object is not iterable" + msg = "'MongoClient' object is not iterable" # Iteration fails with self.assertRaisesRegex(TypeError, msg): for _ in client: # type: ignore[misc] # error: "None" not callable [misc] diff --git a/test/test_collection.py b/test/test_collection.py index af524bba47..8a862646eb 
100644 --- a/test/test_collection.py +++ b/test/test_collection.py @@ -133,13 +133,7 @@ def test_getattr(self): def test_iteration(self): coll = self.db.coll - if "PyPy" in sys.version and sys.version_info < (3, 8, 15): - msg = "'NoneType' object is not callable" - else: - if _IS_SYNC: - msg = "'Collection' object is not iterable" - else: - msg = "'Collection' object is not iterable" + msg = "'Collection' object is not iterable" # Iteration fails with self.assertRaisesRegex(TypeError, msg): for _ in coll: # type: ignore[misc] # error: "None" not callable [misc] diff --git a/test/test_database.py b/test/test_database.py index 5e854c941d..aad9089bd8 100644 --- a/test/test_database.py +++ b/test/test_database.py @@ -102,10 +102,7 @@ def test_getattr(self): def test_iteration(self): db = self.client.pymongo_test - if "PyPy" in sys.version and sys.version_info < (3, 8, 15): - msg = "'NoneType' object is not callable" - else: - msg = "'Database' object is not iterable" + msg = "'Database' object is not iterable" # Iteration fails with self.assertRaisesRegex(TypeError, msg): for _ in db: # type: ignore[misc] # error: "None" not callable [misc] diff --git a/test/test_errors.py b/test/test_errors.py index 2cee7c15d8..d6db6a4ec1 100644 --- a/test/test_errors.py +++ b/test/test_errors.py @@ -47,15 +47,9 @@ def test_operation_failure(self): self.assertIn("full error", traceback.format_exc()) def _test_unicode_strs(self, exc): - if sys.implementation.name == "pypy" and sys.implementation.version < (7, 3, 7): - # PyPy used to display unicode in repr differently. - self.assertEqual( - "unicode \U0001f40d, full error: {'errmsg': 'unicode \\U0001f40d'}", str(exc) - ) - else: - self.assertEqual( - "unicode \U0001f40d, full error: {'errmsg': 'unicode \U0001f40d'}", str(exc) - ) + self.assertEqual( + "unicode \U0001f40d, full error: {'errmsg': 'unicode \U0001f40d'}", str(exc) + ) try: raise exc except Exception: From 7dba1e5dd9c8b483e327f295f8b0bf0cad5e3be8 Mon Sep 17 00:00:00 2001 From: Jib Date: Tue, 21 Jan 2025 14:37:52 -0500 Subject: [PATCH 162/182] PYTHON-5043: Fix list[int, float] typo in binary.py (#2066) --- bson/binary.py | 7 +++++-- test/test_typing.py | 21 ++++++++++++++++++++- 2 files changed, 25 insertions(+), 3 deletions(-) diff --git a/bson/binary.py b/bson/binary.py index 6dc5058c2c..f90dce226c 100644 --- a/bson/binary.py +++ b/bson/binary.py @@ -405,14 +405,17 @@ def from_vector(cls: Type[Binary], vector: BinaryVector) -> Binary: @classmethod @overload def from_vector( - cls: Type[Binary], vector: list[int, float], dtype: BinaryVectorDtype, padding: int = 0 + cls: Type[Binary], + vector: Union[list[int], list[float]], + dtype: BinaryVectorDtype, + padding: int = 0, ) -> Binary: ... @classmethod def from_vector( cls: Type[Binary], - vector: Union[BinaryVector, list[int, float]], + vector: Union[BinaryVector, list[int], list[float]], dtype: Optional[BinaryVectorDtype] = None, padding: Optional[int] = None, ) -> Binary: diff --git a/test/test_typing.py b/test/test_typing.py index bfe4d032c1..65937020d2 100644 --- a/test/test_typing.py +++ b/test/test_typing.py @@ -15,6 +15,7 @@ """Test that each file in mypy_fails/ actually fails mypy, and test some sample client code that uses PyMongo typings. 
""" + from __future__ import annotations import os @@ -37,7 +38,8 @@ if TYPE_CHECKING: from typing_extensions import NotRequired, TypedDict - from bson import ObjectId + from bson import Binary, ObjectId + from bson.binary import BinaryVector, BinaryVectorDtype class Movie(TypedDict): name: str @@ -591,5 +593,22 @@ def test_son_document_type(self) -> None: obj["a"] = 1 +class TestBSONFromVectorType(unittest.TestCase): + @only_type_check + def test_from_vector_binaryvector(self): + list_vector = BinaryVector([127, 7], BinaryVectorDtype.INT8) + Binary.from_vector(list_vector) + + @only_type_check + def test_from_vector_list_int(self): + list_vector = [127, 7] + Binary.from_vector(list_vector, BinaryVectorDtype.INT8) + + @only_type_check + def test_from_vector_list_float(self): + list_vector = [127.0, 7.0] + Binary.from_vector(list_vector, BinaryVectorDtype.INT8) + + if __name__ == "__main__": unittest.main() From 2235b8354cef0acc0b41321fc103d14acf0ef92f Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Tue, 21 Jan 2025 16:22:14 -0600 Subject: [PATCH 163/182] PYTHON-5050 Clean up handling of installed dependencies across deployment targets (#2071) --- .evergreen/install-dependencies.sh | 45 ---------------------- .evergreen/run-azurekms-test.sh | 13 +++++-- .evergreen/run-gcpkms-test.sh | 11 ++++-- .evergreen/scripts/install-dependencies.sh | 42 ++++++++++++++++++-- .evergreen/scripts/prepare-resources.sh | 23 ++++++++--- .evergreen/scripts/setup-dev-env.sh | 2 + 6 files changed, 75 insertions(+), 61 deletions(-) delete mode 100755 .evergreen/install-dependencies.sh diff --git a/.evergreen/install-dependencies.sh b/.evergreen/install-dependencies.sh deleted file mode 100755 index 8773fa2c6d..0000000000 --- a/.evergreen/install-dependencies.sh +++ /dev/null @@ -1,45 +0,0 @@ -#!/bin/bash -set -eu - -# Copy PyMongo's test certificates over driver-evergreen-tools' -cp ${PROJECT_DIRECTORY}/test/certificates/* ${DRIVERS_TOOLS}/.evergreen/x509gen/ - -# Replace MongoOrchestration's client certificate. -cp ${PROJECT_DIRECTORY}/test/certificates/client.pem ${MONGO_ORCHESTRATION_HOME}/lib/client.pem - -if [ -w /etc/hosts ]; then - SUDO="" -else - SUDO="sudo" -fi - -# Install just. -# On Evergreen jobs, "CI" will be set, and we don't want to write to $HOME. -if [ "${CI:-}" == "true" ]; then - BIN_DIR=$DRIVERS_TOOLS_BINARIES -else - BIN_DIR=$HOME/.local/bin -fi -if [ ! -f $BIN_DIR/just ]; then - if [ "Windows_NT" = "${OS:-}" ]; then - TARGET="--target x86_64-pc-windows-msvc" - else - TARGET="" - fi - curl --proto '=https' --tlsv1.2 -sSf https://just.systems/install.sh | bash -s -- $TARGET --to "$BIN_DIR" || { - # CARGO_HOME is defined in configure-env.sh - export CARGO_HOME=${CARGO_HOME:-$HOME/.cargo/} - export RUSTUP_HOME="${CARGO_HOME}/.rustup" - . 
${DRIVERS_TOOLS}/.evergreen/install-rust.sh - cargo install just - if [ "Windows_NT" = "${OS:-}" ]; then - mv $CARGO_HOME/just.exe $BIN_DIR/just - else - mv $CARGO_HOME/just $BIN_DIR - fi - } -fi - -# Add 'server' and 'hostname_not_in_cert' as a hostnames -echo "127.0.0.1 server" | $SUDO tee -a /etc/hosts -echo "127.0.0.1 hostname_not_in_cert" | $SUDO tee -a /etc/hosts diff --git a/.evergreen/run-azurekms-test.sh b/.evergreen/run-azurekms-test.sh index d5c332fa8d..28a84a52e2 100755 --- a/.evergreen/run-azurekms-test.sh +++ b/.evergreen/run-azurekms-test.sh @@ -8,17 +8,22 @@ export AZUREKMS_VMNAME=${AZUREKMS_VMNAME} export AZUREKMS_PRIVATEKEYPATH=/tmp/testazurekms_privatekey export LIBMONGOCRYPT_URL=https://s3.amazonaws.com/mciuploads/libmongocrypt/debian11/master/latest/libmongocrypt.tar.gz SKIP_SERVERS=1 bash $HERE/setup-encryption.sh -tar czf /tmp/mongo-python-driver.tgz . +# Set up the remote files to test. +git add . +git commit -m "add files" || true +git archive -o /tmp/mongo-python-driver.tar HEAD +tar -rf /tmp/mongo-python-driver.tar libmongocrypt +gzip -f /tmp/mongo-python-driver.tar # shellcheck disable=SC2088 -AZUREKMS_SRC="/tmp/mongo-python-driver.tgz" AZUREKMS_DST="~/" \ +AZUREKMS_SRC="/tmp/mongo-python-driver.tar.gz" AZUREKMS_DST="~/" \ $DRIVERS_TOOLS/.evergreen/csfle/azurekms/copy-file.sh echo "Copying files ... end" echo "Untarring file ... begin" -AZUREKMS_CMD="tar xf mongo-python-driver.tgz" \ +AZUREKMS_CMD="tar xf mongo-python-driver.tar.gz" \ $DRIVERS_TOOLS/.evergreen/csfle/azurekms/run-command.sh echo "Untarring file ... end" echo "Running test ... begin" -AZUREKMS_CMD="KEY_NAME=\"$AZUREKMS_KEYNAME\" KEY_VAULT_ENDPOINT=\"$AZUREKMS_KEYVAULTENDPOINT\" SUCCESS=true TEST_FLE_AZURE_AUTO=1 ./.evergreen/just.sh test-eg" \ +AZUREKMS_CMD="KEY_NAME=\"$AZUREKMS_KEYNAME\" KEY_VAULT_ENDPOINT=\"$AZUREKMS_KEYVAULTENDPOINT\" SUCCESS=true TEST_FLE_AZURE_AUTO=1 bash ./.evergreen/just.sh test-eg" \ $DRIVERS_TOOLS/.evergreen/csfle/azurekms/run-command.sh echo "Running test ... end" bash $HERE/teardown-encryption.sh diff --git a/.evergreen/run-gcpkms-test.sh b/.evergreen/run-gcpkms-test.sh index 4c953584b2..37ec2bfe56 100755 --- a/.evergreen/run-gcpkms-test.sh +++ b/.evergreen/run-gcpkms-test.sh @@ -10,11 +10,16 @@ export GCPKMS_ZONE=${GCPKMS_ZONE} export GCPKMS_INSTANCENAME=${GCPKMS_INSTANCENAME} export LIBMONGOCRYPT_URL=https://s3.amazonaws.com/mciuploads/libmongocrypt/debian11/master/latest/libmongocrypt.tar.gz SKIP_SERVERS=1 bash $HERE/setup-encryption.sh -tar czf /tmp/mongo-python-driver.tgz . -GCPKMS_SRC=/tmp/mongo-python-driver.tgz GCPKMS_DST=$GCPKMS_INSTANCENAME: $DRIVERS_TOOLS/.evergreen/csfle/gcpkms/copy-file.sh +# Set up the remote files to test. +git add . +git commit -m "add files" || true +git archive -o /tmp/mongo-python-driver.tar HEAD +tar -rf /tmp/mongo-python-driver.tar libmongocrypt +gzip -f /tmp/mongo-python-driver.tar +GCPKMS_SRC=/tmp/mongo-python-driver.tar.gz GCPKMS_DST=$GCPKMS_INSTANCENAME: $DRIVERS_TOOLS/.evergreen/csfle/gcpkms/copy-file.sh echo "Copying files ... end" echo "Untarring file ... begin" -GCPKMS_CMD="tar xf mongo-python-driver.tgz" $DRIVERS_TOOLS/.evergreen/csfle/gcpkms/run-command.sh +GCPKMS_CMD="tar xf mongo-python-driver.tar.gz" $DRIVERS_TOOLS/.evergreen/csfle/gcpkms/run-command.sh echo "Untarring file ... end" echo "Running test ... 
begin" GCPKMS_CMD="SUCCESS=true TEST_FLE_GCP_AUTO=1 ./.evergreen/just.sh test-eg" $DRIVERS_TOOLS/.evergreen/csfle/gcpkms/run-command.sh diff --git a/.evergreen/scripts/install-dependencies.sh b/.evergreen/scripts/install-dependencies.sh index bbbfc745ec..2b127889aa 100755 --- a/.evergreen/scripts/install-dependencies.sh +++ b/.evergreen/scripts/install-dependencies.sh @@ -1,6 +1,42 @@ #!/bin/bash set -eu -file="$PROJECT_DIRECTORY/.evergreen/install-dependencies.sh" -# Don't use ${file} syntax here because evergreen treats it as an empty expansion. -[ -f "$file" ] && bash "$file" || echo "$file not available, skipping" + +# On Evergreen jobs, "CI" will be set, and we don't want to write to $HOME. +if [ "${CI:-}" == "true" ]; then + _BIN_DIR=${DRIVERS_TOOLS_BINARIES:-} +else + _BIN_DIR=$HOME/.local/bin +fi + + +# Helper function to pip install a dependency using a temporary python env. +function _pip_install() { + _HERE=$(dirname ${BASH_SOURCE:-$0}) + . $_HERE/../utils.sh + _VENV_PATH=$(mktemp -d) + echo "Installing $2 using pip..." + createvirtualenv "$(find_python3)" $_VENV_PATH + python -m pip install $1 + ln -s "$(which $2)" $_BIN_DIR/$2 + echo "Installing $2 using pip... done." +} + + +# Ensure just is installed. +if ! command -v just 2>/dev/null; then + # On most systems we can install directly. + _TARGET="" + if [ "Windows_NT" = "${OS:-}" ]; then + _TARGET="--target x86_64-pc-windows-msvc" + fi + echo "Installing just..." + mkdir -p "$_BIN_DIR" 2>/dev/null || true + curl --proto '=https' --tlsv1.2 -sSf https://just.systems/install.sh | bash -s -- $_TARGET --to "$_BIN_DIR" || { + _pip_install rust-just just + } + if ! command -v just 2>/dev/null; then + export PATH="$PATH:$_BIN_DIR" + fi + echo "Installing just... done." +fi diff --git a/.evergreen/scripts/prepare-resources.sh b/.evergreen/scripts/prepare-resources.sh index 3cfa2c4efd..da869e7055 100755 --- a/.evergreen/scripts/prepare-resources.sh +++ b/.evergreen/scripts/prepare-resources.sh @@ -6,12 +6,23 @@ pushd $HERE . env.sh rm -rf $DRIVERS_TOOLS -if [ "$PROJECT" = "drivers-tools" ]; then - # If this was a patch build, doing a fresh clone would not actually test the patch - cp -R $PROJECT_DIRECTORY/ $DRIVERS_TOOLS -else - git clone https://github.com/mongodb-labs/drivers-evergreen-tools.git $DRIVERS_TOOLS -fi +git clone https://github.com/mongodb-labs/drivers-evergreen-tools.git $DRIVERS_TOOLS echo "{ \"releases\": { \"default\": \"$MONGODB_BINARIES\" }}" >$MONGO_ORCHESTRATION_HOME/orchestration.config popd + +# Copy PyMongo's test certificates over driver-evergreen-tools' +cp ${PROJECT_DIRECTORY}/test/certificates/* ${DRIVERS_TOOLS}/.evergreen/x509gen/ + +# Replace MongoOrchestration's client certificate. +cp ${PROJECT_DIRECTORY}/test/certificates/client.pem ${MONGO_ORCHESTRATION_HOME}/lib/client.pem + +if [ -w /etc/hosts ]; then + SUDO="" +else + SUDO="sudo" +fi + +# Add 'server' and 'hostname_not_in_cert' as a hostnames +echo "127.0.0.1 server" | $SUDO tee -a /etc/hosts +echo "127.0.0.1 hostname_not_in_cert" | $SUDO tee -a /etc/hosts diff --git a/.evergreen/scripts/setup-dev-env.sh b/.evergreen/scripts/setup-dev-env.sh index 7042871942..bfe0bc5b9a 100755 --- a/.evergreen/scripts/setup-dev-env.sh +++ b/.evergreen/scripts/setup-dev-env.sh @@ -17,6 +17,8 @@ else BIN_DIR=.venv/bin fi +. $HERE/install-dependencies.sh + # Ensure there is a python venv. if [ ! -d $BIN_DIR ]; then . 
.evergreen/utils.sh From f1af9178946c51f8f200cf6960f37b610c294158 Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Wed, 22 Jan 2025 08:49:16 -0500 Subject: [PATCH 164/182] =?UTF-8?q?PYTHON-5044=20-=20Fix=20successive=20As?= =?UTF-8?q?yncMongoClients=20on=20a=20single=20loop=20always=20ti=E2=80=A6?= =?UTF-8?q?=20(#2065)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- pymongo/network_layer.py | 31 ++++++++++++------- pymongo/periodic_executor.py | 9 +----- ...nnections_survive_primary_stepdown_spec.py | 1 - ...nnections_survive_primary_stepdown_spec.py | 1 - 4 files changed, 20 insertions(+), 22 deletions(-) diff --git a/pymongo/network_layer.py b/pymongo/network_layer.py index c1db31f89c..11c66bf16e 100644 --- a/pymongo/network_layer.py +++ b/pymongo/network_layer.py @@ -267,18 +267,25 @@ async def async_receive_data( else: read_task = create_task(_async_receive(sock, length, loop)) # type: ignore[arg-type] tasks = [read_task, cancellation_task] - done, pending = await asyncio.wait( - tasks, timeout=timeout, return_when=asyncio.FIRST_COMPLETED - ) - for task in pending: - task.cancel() - if pending: - await asyncio.wait(pending) - if len(done) == 0: - raise socket.timeout("timed out") - if read_task in done: - return read_task.result() - raise _OperationCancelled("operation cancelled") + try: + done, pending = await asyncio.wait( + tasks, timeout=timeout, return_when=asyncio.FIRST_COMPLETED + ) + for task in pending: + task.cancel() + if pending: + await asyncio.wait(pending) + if len(done) == 0: + raise socket.timeout("timed out") + if read_task in done: + return read_task.result() + raise _OperationCancelled("operation cancelled") + except asyncio.CancelledError: + for task in tasks: + task.cancel() + await asyncio.wait(tasks) + raise + finally: sock.settimeout(sock_timeout) diff --git a/pymongo/periodic_executor.py b/pymongo/periodic_executor.py index 2f89b91deb..9b10f6e7e3 100644 --- a/pymongo/periodic_executor.py +++ b/pymongo/periodic_executor.py @@ -78,14 +78,7 @@ def close(self, dummy: Any = None) -> None: async def join(self, timeout: Optional[int] = None) -> None: if self._task is not None: - try: - await asyncio.wait_for(self._task, timeout=timeout) # type-ignore: [arg-type] - except asyncio.TimeoutError: - # Task timed out - pass - except asyncio.exceptions.CancelledError: - # Task was already finished, or not yet started. 
- raise + await asyncio.wait([self._task], timeout=timeout) # type-ignore: [arg-type] def wake(self) -> None: """Execute the target function soon.""" diff --git a/test/asynchronous/test_connections_survive_primary_stepdown_spec.py b/test/asynchronous/test_connections_survive_primary_stepdown_spec.py index 4795d3937a..7c11742a90 100644 --- a/test/asynchronous/test_connections_survive_primary_stepdown_spec.py +++ b/test/asynchronous/test_connections_survive_primary_stepdown_spec.py @@ -22,7 +22,6 @@ from test.asynchronous import ( AsyncIntegrationTest, async_client_context, - reset_client_context, unittest, ) from test.asynchronous.helpers import async_repl_set_step_down diff --git a/test/test_connections_survive_primary_stepdown_spec.py b/test/test_connections_survive_primary_stepdown_spec.py index 1fb08cbed5..9cac633301 100644 --- a/test/test_connections_survive_primary_stepdown_spec.py +++ b/test/test_connections_survive_primary_stepdown_spec.py @@ -22,7 +22,6 @@ from test import ( IntegrationTest, client_context, - reset_client_context, unittest, ) from test.helpers import repl_set_step_down From cfe7784db952ccad09ab6d3afd9629fca792a85a Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Wed, 22 Jan 2025 08:48:17 -0600 Subject: [PATCH 165/182] PYTHON-4976 Replace hatch with uv as our python environment and workfow tool (#2068) --- .evergreen/run-tests.sh | 74 +- .evergreen/scripts/configure-env.sh | 10 +- .evergreen/scripts/generate_config.py | 2 +- .evergreen/scripts/install-dependencies.sh | 13 + .evergreen/scripts/setup-dev-env.sh | 64 +- .evergreen/teardown-encryption.sh | 2 +- .gitignore | 3 +- CONTRIBUTING.md | 6 +- README.md | 17 +- doc/index.rst | 9 +- hatch.toml | 39 - justfile | 44 +- pymongo/compression_support.py | 2 +- pyproject.toml | 24 + requirements/typing.txt | 7 - strict_pyrightconfig.json | 1 + uv.lock | 2092 ++++++++++++++++++++ 17 files changed, 2234 insertions(+), 175 deletions(-) delete mode 100644 hatch.toml delete mode 100644 requirements/typing.txt create mode 100644 strict_pyrightconfig.json create mode 100644 uv.lock diff --git a/.evergreen/run-tests.sh b/.evergreen/run-tests.sh index 95fe10a6c3..d647955059 100755 --- a/.evergreen/run-tests.sh +++ b/.evergreen/run-tests.sh @@ -37,8 +37,7 @@ export PIP_QUIET=1 # Quiet by default export PIP_PREFER_BINARY=1 # Prefer binary dists by default set +x -python -c "import sys; sys.exit(sys.prefix == sys.base_prefix)" || (echo "Not inside a virtual env!"; exit 1) -PYTHON_IMPL=$(python -c "import platform; print(platform.python_implementation())") +PYTHON_IMPL=$(uv run python -c "import platform; print(platform.python_implementation())") # Try to source local Drivers Secrets if [ -f ./secrets-export.sh ]; then @@ -48,9 +47,13 @@ else echo "Not sourcing secrets" fi -# Ensure C extensions have compiled. +# Start compiling the args we'll pass to uv. +# Run in an isolated environment so as not to pollute the base venv. +UV_ARGS=("--isolated --extra test") + +# Ensure C extensions if applicable. 
if [ -z "${NO_EXT:-}" ] && [ "$PYTHON_IMPL" = "CPython" ]; then - python tools/fail_if_no_c.py + uv run tools/fail_if_no_c.py fi if [ "$AUTH" != "noauth" ]; then @@ -77,7 +80,7 @@ if [ "$AUTH" != "noauth" ]; then fi if [ -n "$TEST_ENTERPRISE_AUTH" ]; then - python -m pip install '.[gssapi]' + UV_ARGS+=("--extra gssapi") if [ "Windows_NT" = "$OS" ]; then echo "Setting GSSAPI_PASS" export GSSAPI_PASS=${SASL_PASS} @@ -118,24 +121,26 @@ if [ "$SSL" != "nossl" ]; then fi if [ "$COMPRESSORS" = "snappy" ]; then - python -m pip install '.[snappy]' + UV_ARGS+=("--extra snappy") elif [ "$COMPRESSORS" = "zstd" ]; then - python -m pip install zstandard + UV_ARGS+=("--extra zstandard") fi # PyOpenSSL test setup. if [ -n "$TEST_PYOPENSSL" ]; then - python -m pip install '.[ocsp]' + UV_ARGS+=("--extra ocsp") fi if [ -n "$TEST_ENCRYPTION" ] || [ -n "$TEST_FLE_AZURE_AUTO" ] || [ -n "$TEST_FLE_GCP_AUTO" ]; then - # Check for libmongocrypt checkout. + # Check for libmongocrypt download. if [ ! -d "libmongocrypt" ]; then echo "Run encryption setup first!" exit 1 fi - python -m pip install '.[encryption]' + UV_ARGS+=("--extra encryption") + # TODO: Test with 'pip install pymongocrypt' + UV_ARGS+=("--group pymongocrypt_source") # Use the nocrypto build to avoid dependency issues with older windows/python versions. BASE=$(pwd)/libmongocrypt/nocrypto @@ -155,21 +160,17 @@ if [ -n "$TEST_ENCRYPTION" ] || [ -n "$TEST_FLE_AZURE_AUTO" ] || [ -n "$TEST_FLE exit 1 fi export PYMONGOCRYPT_LIB - - # TODO: Test with 'pip install pymongocrypt' - if [ ! -d "libmongocrypt_git" ]; then - git clone https://github.com/mongodb/libmongocrypt.git libmongocrypt_git - fi - python -m pip install -U setuptools - python -m pip install ./libmongocrypt_git/bindings/python - python -c "import pymongocrypt; print('pymongocrypt version: '+pymongocrypt.__version__)" - python -c "import pymongocrypt; print('libmongocrypt version: '+pymongocrypt.libmongocrypt_version())" - # PATH is updated by PREPARE_SHELL for access to mongocryptd. + # Ensure pymongocrypt is working properly. + # shellcheck disable=SC2048 + uv run ${UV_ARGS[*]} python -c "import pymongocrypt; print('pymongocrypt version: '+pymongocrypt.__version__)" + # shellcheck disable=SC2048 + uv run ${UV_ARGS[*]} python -c "import pymongocrypt; print('libmongocrypt version: '+pymongocrypt.libmongocrypt_version())" + # PATH is updated by configure-env.sh for access to mongocryptd. 
fi if [ -n "$TEST_ENCRYPTION" ]; then if [ -n "$TEST_ENCRYPTION_PYOPENSSL" ]; then - python -m pip install '.[ocsp]' + UV_ARGS+=("--extra ocsp") fi if [ -n "$TEST_CRYPT_SHARED" ]; then @@ -214,22 +215,22 @@ if [ -n "$TEST_ATLAS" ]; then fi if [ -n "$TEST_OCSP" ]; then - python -m pip install ".[ocsp]" + UV_ARGS+=("--extra ocsp") TEST_SUITES="ocsp" fi if [ -n "$TEST_AUTH_AWS" ]; then - python -m pip install ".[aws]" + UV_ARGS+=("--extra aws") TEST_SUITES="auth_aws" fi if [ -n "$TEST_AUTH_OIDC" ]; then - python -m pip install ".[aws]" + UV_ARGS+=("--extra aws") TEST_SUITES="auth_oidc" fi if [ -n "$PERF_TEST" ]; then - python -m pip install simplejson + UV_ARGS+=("--group perf") start_time=$(date +%s) TEST_SUITES="perf" # PYTHON-4769 Run perf_test.py directly otherwise pytest's test collection negatively @@ -237,8 +238,8 @@ if [ -n "$PERF_TEST" ]; then TEST_ARGS="test/performance/perf_test.py $TEST_ARGS" fi -echo "Running $AUTH tests over $SSL with python $(which python)" -python -c 'import sys; print(sys.version)' +echo "Running $AUTH tests over $SSL with python $(uv python find)" +uv run python -c 'import sys; print(sys.version)' # Run the tests, and store the results in Evergreen compatible XUnit XML @@ -249,27 +250,30 @@ python -c 'import sys; print(sys.version)' if [ -n "$COVERAGE" ] && [ "$PYTHON_IMPL" = "CPython" ]; then # Keep in sync with combine-coverage.sh. # coverage >=5 is needed for relative_files=true. - python -m pip install pytest-cov "coverage>=5,<=7.5" + UV_ARGS+=("--group coverage") TEST_ARGS="$TEST_ARGS --cov" fi if [ -n "$GREEN_FRAMEWORK" ]; then - python -m pip install $GREEN_FRAMEWORK + UV_ARGS+=("--group $GREEN_FRAMEWORK") fi # Show the installed packages -PIP_QUIET=0 python -m pip list +# shellcheck disable=SC2048 +PIP_QUIET=0 uv run ${UV_ARGS[*]} --with pip pip list if [ -z "$GREEN_FRAMEWORK" ]; then # Use --capture=tee-sys so pytest prints test output inline: # https://docs.pytest.org/en/stable/how-to/capture-stdout-stderr.html - if [ -z "$TEST_SUITES" ]; then - python -m pytest -v --capture=tee-sys --durations=5 $TEST_ARGS - else - python -m pytest -v --capture=tee-sys --durations=5 -m $TEST_SUITES $TEST_ARGS + PYTEST_ARGS="-v --capture=tee-sys --durations=5 $TEST_ARGS" + if [ -n "$TEST_SUITES" ]; then + PYTEST_ARGS="-m $TEST_SUITES $PYTEST_ARGS" fi + # shellcheck disable=SC2048 + uv run ${UV_ARGS[*]} pytest $PYTEST_ARGS else - python green_framework_test.py $GREEN_FRAMEWORK -v $TEST_ARGS + # shellcheck disable=SC2048 + uv run ${UV_ARGS[*]} green_framework_test.py $GREEN_FRAMEWORK -v $TEST_ARGS fi # Handle perf test post actions. diff --git a/.evergreen/scripts/configure-env.sh b/.evergreen/scripts/configure-env.sh index ebbffcf1db..cb018d09f0 100755 --- a/.evergreen/scripts/configure-env.sh +++ b/.evergreen/scripts/configure-env.sh @@ -14,14 +14,16 @@ fi PROJECT_DIRECTORY="$(pwd)" DRIVERS_TOOLS="$(dirname $PROJECT_DIRECTORY)/drivers-tools" CARGO_HOME=${CARGO_HOME:-${DRIVERS_TOOLS}/.cargo} -HATCH_CONFIG=$PROJECT_DIRECTORY/hatch_config.toml +UV_TOOL_DIR=$PROJECT_DIRECTORY/.local/uv/tools +UV_CACHE_DIR=$PROJECT_DIRECTORY/.local/uv/cache # Python has cygwin path problems on Windows. 
Detect prospective mongo-orchestration home directory if [ "Windows_NT" = "${OS:-}" ]; then # Magic variable in cygwin DRIVERS_TOOLS=$(cygpath -m $DRIVERS_TOOLS) PROJECT_DIRECTORY=$(cygpath -m $PROJECT_DIRECTORY) CARGO_HOME=$(cygpath -m $CARGO_HOME) - HATCH_CONFIG=$(cygpath -m "$HATCH_CONFIG") + UV_TOOL_DIR=$(cygpath -m "$UV_TOOL_DIR") + UV_CACHE_DIR=$(cygpath -m "$UV_CACHE_DIR") fi SCRIPT_DIR="$PROJECT_DIRECTORY/.evergreen/scripts" @@ -62,7 +64,9 @@ export skip_ECS_auth_test="${skip_ECS_auth_test:-}" export CARGO_HOME="$CARGO_HOME" export TMPDIR="$MONGO_ORCHESTRATION_HOME/db" -export HATCH_CONFIG="$HATCH_CONFIG" +export UV_TOOL_DIR="$UV_TOOL_DIR" +export UV_CACHE_DIR="$UV_CACHE_DIR" +export UV_TOOL_BIN_DIR="$DRIVERS_TOOLS_BINARIES" export PATH="$MONGODB_BINARIES:$DRIVERS_TOOLS_BINARIES:$PATH" # shellcheck disable=SC2154 export PROJECT="${project:-mongo-python-driver}" diff --git a/.evergreen/scripts/generate_config.py b/.evergreen/scripts/generate_config.py index 2917e882d8..e9624ab109 100644 --- a/.evergreen/scripts/generate_config.py +++ b/.evergreen/scripts/generate_config.py @@ -6,7 +6,7 @@ # ] # /// -# Note: Run this file with `hatch run`, `pipx run`, or `uv run`. +# Note: Run this file with `pipx run`, or `uv run`. from __future__ import annotations import sys diff --git a/.evergreen/scripts/install-dependencies.sh b/.evergreen/scripts/install-dependencies.sh index 2b127889aa..39b77199bb 100755 --- a/.evergreen/scripts/install-dependencies.sh +++ b/.evergreen/scripts/install-dependencies.sh @@ -40,3 +40,16 @@ if ! command -v just 2>/dev/null; then fi echo "Installing just... done." fi + +# Install uv. +if ! command -v uv 2>/dev/null; then + echo "Installing uv..." + # On most systems we can install directly. + curl -LsSf https://astral.sh/uv/install.sh | env UV_INSTALL_DIR="$_BIN_DIR" INSTALLER_NO_MODIFY_PATH=1 sh || { + _pip_install uv uv + } + if ! command -v uv 2>/dev/null; then + export PATH="$PATH:$_BIN_DIR" + fi + echo "Installing uv... done." +fi diff --git a/.evergreen/scripts/setup-dev-env.sh b/.evergreen/scripts/setup-dev-env.sh index bfe0bc5b9a..3f8d0c4292 100755 --- a/.evergreen/scripts/setup-dev-env.sh +++ b/.evergreen/scripts/setup-dev-env.sh @@ -6,10 +6,14 @@ HERE=$(dirname ${BASH_SOURCE:-$0}) pushd "$(dirname "$(dirname $HERE)")" > /dev/null # Source the env file to pick up common variables. -if [ -f $HERE/scripts/env.sh ]; then - source $HERE/scripts/env.sh +if [ -f $HERE/env.sh ]; then + source $HERE/env.sh fi +# Ensure dependencies are installed. +. $HERE/install-dependencies.sh + + # Set the location of the python bin dir. if [ "Windows_NT" = "${OS:-}" ]; then BIN_DIR=.venv/Scripts @@ -17,8 +21,6 @@ else BIN_DIR=.venv/bin fi -. $HERE/install-dependencies.sh - # Ensure there is a python venv. if [ ! -d $BIN_DIR ]; then . .evergreen/utils.sh @@ -26,49 +28,15 @@ if [ ! -d $BIN_DIR ]; then if [ -z "${PYTHON_BINARY:-}" ]; then PYTHON_BINARY=$(find_python3) fi - - echo "Creating virtual environment..." - createvirtualenv "$PYTHON_BINARY" .venv - echo "Creating virtual environment... done." -fi - -# Activate the virtual env. -. $BIN_DIR/activate - -# Ensure there is a local hatch. -if [ ! -f $BIN_DIR/hatch ]; then - echo "Installing hatch..." - python -m pip install hatch || { - # CARGO_HOME is defined in configure-env.sh - export CARGO_HOME=${CARGO_HOME:-$HOME/.cargo/} - export RUSTUP_HOME="${CARGO_HOME}/.rustup" - ${DRIVERS_TOOLS}/.evergreen/install-rust.sh - source "${CARGO_HOME}/env" - python -m pip install hatch - } - echo "Installing hatch... done." 
-fi - -# Ensure hatch does not write to user or global locations. -HATCH_CONFIG=${HATCH_CONFIG:-hatch_config.toml} -if [ ! -f ${HATCH_CONFIG} ]; then - touch hatch_config.toml - hatch config restore - hatch config set dirs.data "$(pwd)/.hatch/data" - hatch config set dirs.cache "$(pwd)/.hatch/cache" + export UV_PYTHON=${PYTHON_BINARY} + echo "export UV_PYTHON=$UV_PYTHON" >> $HERE/env.sh fi - -# Ensure there is a local pre-commit if there is a git checkout. -if [ -d .git ]; then - if [ ! -f $BIN_DIR/pre-commit ]; then - python -m pip install pre-commit - fi - - # Ensure the pre-commit hook is installed. - if [ ! -f .git/hooks/pre-commit ]; then - pre-commit install - fi +echo "Using python $UV_PYTHON" +uv sync +uv run --with pip pip install -e . +echo "Setting up python environment... done." + +# Ensure there is a pre-commit hook if there is a git checkout. +if [ -d .git ] && [ ! -f .git/hooks/pre-commit ]; then + uv run pre-commit install fi - -# Install pymongo and its test deps. -python -m pip install ".[test]" diff --git a/.evergreen/teardown-encryption.sh b/.evergreen/teardown-encryption.sh index 88dc16bba8..5ce2f1d71b 100755 --- a/.evergreen/teardown-encryption.sh +++ b/.evergreen/teardown-encryption.sh @@ -7,4 +7,4 @@ if [ -z "${DRIVERS_TOOLS}" ]; then fi bash ${DRIVERS_TOOLS}/.evergreen/csfle/stop-servers.sh -rm -rf libmongocrypt/ libmongocrypt_git/ libmongocrypt.tar.gz mongocryptd.pid +rm -rf libmongocrypt/ libmongocrypt.tar.gz mongocryptd.pid diff --git a/.gitignore b/.gitignore index 01f896d316..2582c517fd 100644 --- a/.gitignore +++ b/.gitignore @@ -22,10 +22,9 @@ venv/ secrets-export.sh libmongocrypt.tar.gz libmongocrypt/ -libmongocrypt_git/ -hatch_config.toml .venv expansion.yml +*expansions.yml .evergreen/scripts/env.sh # Lambda temp files diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 5a46151760..536110fcfc 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -16,7 +16,7 @@ be of interest or that has already been addressed. ## Supported Interpreters -PyMongo supports CPython 3.9+ and PyPy3.9+. Language features not +PyMongo supports CPython 3.9+ and PyPy3.10+. Language features not supported by all interpreters can not be used. ## Style Guide @@ -28,7 +28,7 @@ including 4 space indents and 79 character line limits. - Avoid backward breaking changes if at all possible. - Write inline documentation for new classes and methods. -- We use [hatch](https://hatch.pypa.io/dev/) for python environment management and packaging. +- We use [uv](https://docs.astral.sh/uv/) for python environment management and packaging. - We use [just](https://just.systems/man/en/) as our task runner. - Write tests and make sure they pass (make sure you have a mongod running on the default port, then execute `just test` from the cmd @@ -194,7 +194,7 @@ the pages will re-render and the browser will automatically refresh. ## Running Tests Locally - Ensure you have started the appropriate Mongo Server(s). -- Run `just install` to set up `hatch` in a local virtual environment, or you can manually +- Run `just install` to set a local virtual environment, or you can manually create a virtual environment and run `pytest` directly. If you want to use a specific version of Python, remove the `.venv` folder and set `PYTHON_BINARY` before running `just install`. - Run `just test` or `pytest` to run all of the tests. 
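The CONTRIBUTING.md hunk above replaces the hatch-based instructions with the new uv/just workflow. As a rough sketch (not part of the patch itself), a local contributor session under the new tooling might look like the following, assuming `uv` and `just` are already on PATH and a mongod is listening on the default port; the recipe names are taken from the justfile changes later in this series:

```bash
# Sketch of the uv/just contributor workflow described above.
# Assumes uv and just are installed and a local mongod is running on the default port.
just install     # create the local .venv with uv and install PyMongo in editable mode
just test        # run pytest (defaults to -v --durations=5 --maxfail=10)
just typing      # run the mypy and pyright checks
just lint        # run pre-commit across the repository
```
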
diff --git a/README.md b/README.md index b8e0078101..962d0d958c 100644 --- a/README.md +++ b/README.md @@ -152,11 +152,6 @@ command: python -m pip install "pymongo[gssapi,aws,ocsp,snappy,zstd,encryption]" ``` -Additional dependencies are: - -- (to generate documentation or run tests) - [hatch](https://hatch.pypa.io/dev/) - ## Examples Here's a basic example (for more see the *examples* section of the @@ -201,8 +196,7 @@ ObjectId('4aba160ee23f6b543e000002') Documentation is available at [pymongo.readthedocs.io](https://pymongo.readthedocs.io/en/stable/). -Documentation can be generated by running **pip install hatch; hatch run doc:build**. Generated -documentation can be found in the `doc/build/html/` directory. +See the [contributing guide](./CONTRIBUTING.md#documentation) for how to build the documentation. ## Learning Resources @@ -213,10 +207,11 @@ Center](https://www.mongodb.com/developer/languages/python/). ## Testing -The easiest way to run the tests is to run *hatch run test:test** in the root -of the distribution. For example, +The easiest way to run the tests is to run the following from the repository root. ```bash -pip install hatch -hatch run test:test +pip install -e ".[test]" +pytest ``` + +For more advanced testing scenarios, see the [contributing guide](./CONTRIBUTING.md#running-tests-locally). diff --git a/doc/index.rst b/doc/index.rst index 0ac8bdec6e..079738314a 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -106,13 +106,8 @@ About This Documentation This documentation is generated using the `Sphinx `_ documentation generator. The source files for the documentation are located in the *doc/* directory of the -**PyMongo** distribution. To generate the docs locally run the -following command from the root directory of the **PyMongo** source: - -.. code-block:: bash - - $ pip install hatch - $ hatch run doc:build +**PyMongo** distribution. See the PyMongo `contributing guide `_ +for instructions on the building the docs from source. 
Indices and tables ------------------ diff --git a/hatch.toml b/hatch.toml deleted file mode 100644 index 15d0f25f07..0000000000 --- a/hatch.toml +++ /dev/null @@ -1,39 +0,0 @@ -# See https://hatch.pypa.io/dev/config/environment/overview/ - -[envs.doc] -features = ["docs"] -[envs.doc.scripts] -build = "sphinx-build -W -b html doc ./doc/_build/html" -serve = "sphinx-autobuild -W -b html doc --watch ./pymongo --watch ./bson --watch ./gridfs ./doc/_build/serve" -linkcheck = "sphinx-build -E -b linkcheck doc ./doc/_build/linkcheck" - -[envs.doctest] -features = ["docs","test"] -[envs.doctest.scripts] -test = "sphinx-build -E -b doctest doc ./doc/_build/doctest" - -[envs.typing] -pre-install-commands = [ - "pip install -q -r requirements/typing.txt", -] -[envs.typing.scripts] -check-mypy = [ - "mypy --install-types --non-interactive bson gridfs tools pymongo", - "mypy --install-types --non-interactive --config-file mypy_test.ini test", - "mypy --install-types --non-interactive test/test_typing.py test/test_typing_strict.py" -] -check-pyright = ["rm -f pyrightconfig.json", "pyright test/test_typing.py test/test_typing_strict.py"] -check-strict-pyright = [ - "echo '{{\"strict\": [\"tests/test_typing_strict.py\"]}}' > pyrightconfig.json", - "pyright test/test_typing_strict.py", - "rm -f pyrightconfig.json" -] -check = ["check-mypy", "check-pyright", "check-strict-pyright"] - -[envs.test] -features = ["test"] -[envs.test.scripts] -test = "pytest -v --durations=5 --maxfail=10 {args}" -test-eg = "bash ./.evergreen/run-tests.sh {args}" -test-async = "pytest -v --durations=5 --maxfail=10 -m default_async {args}" -test-mockupdb = ["pip install -U git+https://github.com/mongodb-labs/mongo-mockup-db@master", "test -m mockupdb"] diff --git a/justfile b/justfile index 23f0993c6b..6bcfe0c79c 100644 --- a/justfile +++ b/justfile @@ -3,10 +3,12 @@ set shell := ["bash", "-c"] set dotenv-load set dotenv-filename := "./.evergreen/scripts/env.sh" -# Handle cross-platform paths to local python cli tools. -python_bin_dir := if os_family() == "windows" { "./.venv/Scripts" } else { "./.venv/bin" } -hatch_bin := python_bin_dir + "/hatch" -pre_commit_bin := python_bin_dir + "/pre-commit" +# Commonly used command segments. +uv_run := "uv run --isolated " +typing_run := uv_run + "--group typing --extra aws --extra encryption --extra ocsp --extra snappy --extra test --extra zstd" +docs_run := uv_run + "--extra docs" +doc_build := "./doc/_build" +mypy_args := "--install-types --non-interactive" # Make the default recipe private so it doesn't show up in the list. 
[private] @@ -18,47 +20,55 @@ install: [group('docs')] docs: - {{hatch_bin}} run doc:build + {{docs_run}} sphinx-build -W -b html doc {{doc_build}}/html [group('docs')] docs-serve: - {{hatch_bin}} run doc:serve + {{docs_run}} sphinx-autobuild -W -b html doc --watch ./pymongo --watch ./bson --watch ./gridfs {{doc_build}}/serve [group('docs')] docs-linkcheck: - {{hatch_bin}} run doc:linkcheck + {{docs_run}} sphinx-build -E -b linkcheck doc {{doc_build}}/linkcheck [group('docs')] docs-test: - {{hatch_bin}} run doctest:test + {{docs_run}} --extra test sphinx-build -E -b doctest doc {{doc_build}}/doctest [group('typing')] typing: - {{hatch_bin}} run typing:check + just typing-mypy + just typing-pyright [group('typing')] typing-mypy: - {{hatch_bin}} run typing:mypy + {{typing_run}} mypy {{mypy_args}} bson gridfs tools pymongo + {{typing_run}} mypy {{mypy_args}} --config-file mypy_test.ini test + {{typing_run}} mypy {{mypy_args}} test/test_typing.py test/test_typing_strict.py + +[group('typing')] +typing-pyright: + {{typing_run}} pyright test/test_typing.py test/test_typing_strict.py + {{typing_run}} pyright -p strict_pyrightconfig.json test/test_typing_strict.py [group('lint')] lint: - {{pre_commit_bin}} run --all-files + {{uv_run}} pre-commit run --all-files [group('lint')] lint-manual: - {{pre_commit_bin}} run --all-files --hook-stage manual + {{uv_run}} pre-commit run --all-files --hook-stage manual [group('test')] -test *args: - {{hatch_bin}} run test:test {{args}} +test *args="-v --durations=5 --maxfail=10": + {{uv_run}} --extra test pytest {{args}} [group('test')] -test-mockupdb: - {{hatch_bin}} run test:test-mockupdb +test-mockupdb *args: + {{uv_run}} -v --extra test --group mockupdb pytest -m mockupdb {{args}} [group('test')] test-eg *args: - {{hatch_bin}} run test:test-eg {{args}} + bash ./.evergreen/run-tests.sh {{args}} [group('encryption')] setup-encryption: diff --git a/pymongo/compression_support.py b/pymongo/compression_support.py index c71e4bddcf..f49b56cc96 100644 --- a/pymongo/compression_support.py +++ b/pymongo/compression_support.py @@ -26,7 +26,7 @@ def _have_snappy() -> bool: try: - import snappy # type:ignore[import-not-found] # noqa: F401 + import snappy # type:ignore[import-untyped] # noqa: F401 return True except ImportError: diff --git a/pyproject.toml b/pyproject.toml index a9977a382c..69249ee4c6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,6 +45,30 @@ Documentation = "https://www.mongodb.com/docs/languages/python/pymongo-driver/cu Source = "https://github.com/mongodb/mongo-python-driver" Tracker = "https://jira.mongodb.org/projects/PYTHON/issues" +[dependency-groups] +dev = [ + "pre-commit>=4.0" +] +gevent = ["gevent"] +eventlet = ["eventlet"] +coverage = [ + "pytest-cov", + "coverage>=5,<=7.5" +] +mockupdb = [ + "mockupdb@git+https://github.com/mongodb-labs/mongo-mockup-db@master" +] +pymongocrypt_source = [ + "pymongocrypt@git+https://github.com/mongodb/libmongocrypt@master#subdirectory=bindings/python" +] +perf = ["simplejson"] +typing = [ + "mypy==1.14.1", + "pyright==1.1.392.post0", + "typing_extensions", + "pip" +] + # Used to call hatch_build.py [tool.hatch.build.hooks.custom] diff --git a/requirements/typing.txt b/requirements/typing.txt deleted file mode 100644 index b0f0c9c7fc..0000000000 --- a/requirements/typing.txt +++ /dev/null @@ -1,7 +0,0 @@ -mypy==1.14.1 -pyright==1.1.392.post0 -typing_extensions --r ./encryption.txt --r ./ocsp.txt --r ./zstd.txt --r ./aws.txt diff --git a/strict_pyrightconfig.json b/strict_pyrightconfig.json new file 
mode 100644 index 0000000000..9684598cd9 --- /dev/null +++ b/strict_pyrightconfig.json @@ -0,0 +1 @@ +{"strict": ["tests/test_typing_strict.py"]} \ No newline at end of file diff --git a/uv.lock b/uv.lock new file mode 100644 index 0000000000..e7f09f66fc --- /dev/null +++ b/uv.lock @@ -0,0 +1,2092 @@ +version = 1 +requires-python = ">=3.9" +resolution-markers = [ + "python_full_version >= '3.10'", + "python_full_version < '3.10'", +] + +[[package]] +name = "alabaster" +version = "0.7.16" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] +sdist = { url = "https://files.pythonhosted.org/packages/c9/3e/13dd8e5ed9094e734ac430b5d0eb4f2bb001708a8b7856cbf8e084e001ba/alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65", size = 23776 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/34/d4e1c02d3bee589efb5dfa17f88ea08bdb3e3eac12bc475462aec52ed223/alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92", size = 13511 }, +] + +[[package]] +name = "alabaster" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", +] +sdist = { url = "https://files.pythonhosted.org/packages/a6/f8/d9c74d0daf3f742840fd818d69cfae176fa332022fd44e3469487d5a9420/alabaster-1.0.0.tar.gz", hash = "sha256:c00dca57bca26fa62a6d7d0a9fcce65f3e026e9bfe33e9c538fd3fbb2144fd9e", size = 24210 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/b3/6b4067be973ae96ba0d615946e314c5ae35f9f993eca561b356540bb0c2b/alabaster-1.0.0-py3-none-any.whl", hash = "sha256:fc6786402dc3fcb2de3cabd5fe455a2db534b371124f1f21de8731783dec828b", size = 13929 }, +] + +[[package]] +name = "anyio" +version = "4.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "idna" }, + { name = "sniffio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a3/73/199a98fc2dae33535d6b8e8e6ec01f8c1d76c9adb096c6b7d64823038cde/anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a", size = 181126 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/46/eb/e7f063ad1fec6b3178a3cd82d1a3c4de82cccf283fc42746168188e1cdd5/anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a", size = 96041 }, +] + +[[package]] +name = "attrs" +version = "24.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/48/c8/6260f8ccc11f0917360fc0da435c5c9c7504e3db174d5a12a1494887b045/attrs-24.3.0.tar.gz", hash = "sha256:8f5c07333d543103541ba7be0e2ce16eeee8130cb0b3f9238ab904ce1e85baff", size = 805984 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/89/aa/ab0f7891a01eeb2d2e338ae8fecbe57fcebea1a24dbb64d45801bfab481d/attrs-24.3.0-py3-none-any.whl", hash = "sha256:ac96cd038792094f438ad1f6ff80837353805ac950cd2aa0e0625ef19850c308", size = 63397 }, +] + +[[package]] +name = "babel" +version = "2.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2a/74/f1bc80f23eeba13393b7222b11d95ca3af2c1e28edca18af487137eefed9/babel-2.16.0.tar.gz", hash = 
"sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316", size = 9348104 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ed/20/bc79bc575ba2e2a7f70e8a1155618bb1301eaa5132a8271373a6903f73f8/babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b", size = 9587599 }, +] + +[[package]] +name = "beautifulsoup4" +version = "4.12.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "soupsieve" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b3/ca/824b1195773ce6166d388573fc106ce56d4a805bd7427b624e063596ec58/beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051", size = 581181 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b1/fe/e8c672695b37eecc5cbf43e1d0638d88d66ba3a44c4d321c796f4e59167f/beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed", size = 147925 }, +] + +[[package]] +name = "boto3" +version = "1.36.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore" }, + { name = "jmespath" }, + { name = "s3transfer" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3a/e9/c0b2fa75efc4007ea1af21bc2fcbedf6e545c517fb90904d7f59850e02bf/boto3-1.36.2.tar.gz", hash = "sha256:fde1c29996b77274a60b7bc9f741525afa6267bb1716eb644a764fb7c124a0d2", size = 110998 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/17/c2/72a92794237b43f64141e156bc3a58bc36d18631f1a614e1e97a48b56447/boto3-1.36.2-py3-none-any.whl", hash = "sha256:76cfc9a705be46e8d22607efacc8d688c064f923d785a01c00b28e9a96425d1a", size = 139166 }, +] + +[[package]] +name = "botocore" +version = "1.36.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jmespath" }, + { name = "python-dateutil" }, + { name = "urllib3", version = "1.26.20", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "urllib3", version = "2.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c6/93/353b70cea6447e37789fc2d6f761fc12ae36fb4adb6f558055de8cdf655f/botocore-1.36.2.tar.gz", hash = "sha256:a1fe6610983f0214b0c7655fe6990b6a731746baf305b182976fc7b568fc3cb0", size = 13505440 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0c/fe/c066e8cb069027c12dbcf9066a7a4f3e9d2a31b10c7b174a8455ef1d0f46/botocore-1.36.2-py3-none-any.whl", hash = "sha256:bc3b7e3b573a48af2bd7116b80fe24f9a335b0b67314dcb2697a327d009abf29", size = 13302324 }, +] + +[[package]] +name = "certifi" +version = "2024.12.14" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/bd/1d41ee578ce09523c81a15426705dd20969f5abf006d1afe8aeff0dd776a/certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db", size = 166010 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a5/32/8f6669fc4798494966bf446c8c4a162e0b5d893dff088afddf76414f70e1/certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56", size = 164927 }, +] + +[[package]] +name = "cffi" +version = "1.17.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/90/07/f44ca684db4e4f08a3fdc6eeb9a0d15dc6883efc7b8c90357fdbf74e186c/cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14", size = 182191 }, + { url = "https://files.pythonhosted.org/packages/08/fd/cc2fedbd887223f9f5d170c96e57cbf655df9831a6546c1727ae13fa977a/cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67", size = 178592 }, + { url = "https://files.pythonhosted.org/packages/de/cc/4635c320081c78d6ffc2cab0a76025b691a91204f4aa317d568ff9280a2d/cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382", size = 426024 }, + { url = "https://files.pythonhosted.org/packages/b6/7b/3b2b250f3aab91abe5f8a51ada1b717935fdaec53f790ad4100fe2ec64d1/cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702", size = 448188 }, + { url = "https://files.pythonhosted.org/packages/d3/48/1b9283ebbf0ec065148d8de05d647a986c5f22586b18120020452fff8f5d/cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3", size = 455571 }, + { url = "https://files.pythonhosted.org/packages/40/87/3b8452525437b40f39ca7ff70276679772ee7e8b394934ff60e63b7b090c/cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6", size = 436687 }, + { url = "https://files.pythonhosted.org/packages/8d/fb/4da72871d177d63649ac449aec2e8a29efe0274035880c7af59101ca2232/cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17", size = 446211 }, + { url = "https://files.pythonhosted.org/packages/ab/a0/62f00bcb411332106c02b663b26f3545a9ef136f80d5df746c05878f8c4b/cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8", size = 461325 }, + { url = "https://files.pythonhosted.org/packages/36/83/76127035ed2e7e27b0787604d99da630ac3123bfb02d8e80c633f218a11d/cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e", size = 438784 }, + { url = "https://files.pythonhosted.org/packages/21/81/a6cd025db2f08ac88b901b745c163d884641909641f9b826e8cb87645942/cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be", size = 461564 }, + { url = "https://files.pythonhosted.org/packages/f8/fe/4d41c2f200c4a457933dbd98d3cf4e911870877bd94d9656cc0fcb390681/cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c", size = 171804 }, + { url = "https://files.pythonhosted.org/packages/d1/b6/0b0f5ab93b0df4acc49cae758c81fe4e5ef26c3ae2e10cc69249dfd8b3ab/cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15", 
size = 181299 }, + { url = "https://files.pythonhosted.org/packages/6b/f4/927e3a8899e52a27fa57a48607ff7dc91a9ebe97399b357b85a0c7892e00/cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401", size = 182264 }, + { url = "https://files.pythonhosted.org/packages/6c/f5/6c3a8efe5f503175aaddcbea6ad0d2c96dad6f5abb205750d1b3df44ef29/cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf", size = 178651 }, + { url = "https://files.pythonhosted.org/packages/94/dd/a3f0118e688d1b1a57553da23b16bdade96d2f9bcda4d32e7d2838047ff7/cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4", size = 445259 }, + { url = "https://files.pythonhosted.org/packages/2e/ea/70ce63780f096e16ce8588efe039d3c4f91deb1dc01e9c73a287939c79a6/cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41", size = 469200 }, + { url = "https://files.pythonhosted.org/packages/1c/a0/a4fa9f4f781bda074c3ddd57a572b060fa0df7655d2a4247bbe277200146/cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1", size = 477235 }, + { url = "https://files.pythonhosted.org/packages/62/12/ce8710b5b8affbcdd5c6e367217c242524ad17a02fe5beec3ee339f69f85/cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6", size = 459721 }, + { url = "https://files.pythonhosted.org/packages/ff/6b/d45873c5e0242196f042d555526f92aa9e0c32355a1be1ff8c27f077fd37/cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d", size = 467242 }, + { url = "https://files.pythonhosted.org/packages/1a/52/d9a0e523a572fbccf2955f5abe883cfa8bcc570d7faeee06336fbd50c9fc/cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6", size = 477999 }, + { url = "https://files.pythonhosted.org/packages/44/74/f2a2460684a1a2d00ca799ad880d54652841a780c4c97b87754f660c7603/cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f", size = 454242 }, + { url = "https://files.pythonhosted.org/packages/f8/4a/34599cac7dfcd888ff54e801afe06a19c17787dfd94495ab0c8d35fe99fb/cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b", size = 478604 }, + { url = "https://files.pythonhosted.org/packages/34/33/e1b8a1ba29025adbdcda5fb3a36f94c03d771c1b7b12f726ff7fef2ebe36/cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655", size = 171727 }, + { url = "https://files.pythonhosted.org/packages/3d/97/50228be003bb2802627d28ec0627837ac0bf35c90cf769812056f235b2d1/cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0", size = 181400 }, + { url = "https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178 }, + { url = "https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840 }, + { url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803 }, + { url = "https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850 }, + { url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729 }, + { url = "https://files.pythonhosted.org/packages/91/2b/9a1ddfa5c7f13cab007a2c9cc295b70fbbda7cb10a286aa6810338e60ea1/cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", size = 471256 }, + { url = "https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", size = 479424 }, + { url = "https://files.pythonhosted.org/packages/0b/ac/2a28bcf513e93a219c8a4e8e125534f4f6db03e3179ba1c45e949b76212c/cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", size = 484568 }, + { url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736 }, + { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448 }, + { url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976 }, + { url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989 }, + { url = "https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802 }, + { url = 
"https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792 }, + { url = "https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893 }, + { url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810 }, + { url = "https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200 }, + { url = "https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447 }, + { url = "https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358 }, + { url = "https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", size = 488469 }, + { url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475 }, + { url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009 }, + { url = "https://files.pythonhosted.org/packages/b9/ea/8bb50596b8ffbc49ddd7a1ad305035daa770202a6b782fc164647c2673ad/cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16", size = 182220 }, + { url = "https://files.pythonhosted.org/packages/ae/11/e77c8cd24f58285a82c23af484cf5b124a376b32644e445960d1a4654c3a/cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36", size = 178605 }, + { url = "https://files.pythonhosted.org/packages/ed/65/25a8dc32c53bf5b7b6c2686b42ae2ad58743f7ff644844af7cdb29b49361/cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8", size = 424910 }, + { url = 
"https://files.pythonhosted.org/packages/42/7a/9d086fab7c66bd7c4d0f27c57a1b6b068ced810afc498cc8c49e0088661c/cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576", size = 447200 }, + { url = "https://files.pythonhosted.org/packages/da/63/1785ced118ce92a993b0ec9e0d0ac8dc3e5dbfbcaa81135be56c69cabbb6/cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87", size = 454565 }, + { url = "https://files.pythonhosted.org/packages/74/06/90b8a44abf3556599cdec107f7290277ae8901a58f75e6fe8f970cd72418/cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0", size = 435635 }, + { url = "https://files.pythonhosted.org/packages/bd/62/a1f468e5708a70b1d86ead5bab5520861d9c7eacce4a885ded9faa7729c3/cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3", size = 445218 }, + { url = "https://files.pythonhosted.org/packages/5b/95/b34462f3ccb09c2594aa782d90a90b045de4ff1f70148ee79c69d37a0a5a/cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595", size = 460486 }, + { url = "https://files.pythonhosted.org/packages/fc/fc/a1e4bebd8d680febd29cf6c8a40067182b64f00c7d105f8f26b5bc54317b/cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a", size = 437911 }, + { url = "https://files.pythonhosted.org/packages/e6/c3/21cab7a6154b6a5ea330ae80de386e7665254835b9e98ecc1340b3a7de9a/cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e", size = 460632 }, + { url = "https://files.pythonhosted.org/packages/cb/b5/fd9f8b5a84010ca169ee49f4e4ad6f8c05f4e3545b72ee041dbbcb159882/cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7", size = 171820 }, + { url = "https://files.pythonhosted.org/packages/8c/52/b08750ce0bce45c143e1b5d7357ee8c55341b52bdef4b0f081af1eb248c2/cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662", size = 181290 }, +] + +[[package]] +name = "cfgv" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/11/74/539e56497d9bd1d484fd863dd69cbbfa653cd2aa27abfe35653494d85e94/cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560", size = 7114 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249 }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/16/b0/572805e227f01586461c80e0fd25d65a2115599cc9dad142fee4b747c357/charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3", size = 123188 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/0d/58/5580c1716040bc89206c77d8f74418caf82ce519aae06450393ca73475d1/charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de", size = 198013 }, + { url = "https://files.pythonhosted.org/packages/d0/11/00341177ae71c6f5159a08168bcb98c6e6d196d372c94511f9f6c9afe0c6/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176", size = 141285 }, + { url = "https://files.pythonhosted.org/packages/01/09/11d684ea5819e5a8f5100fb0b38cf8d02b514746607934134d31233e02c8/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037", size = 151449 }, + { url = "https://files.pythonhosted.org/packages/08/06/9f5a12939db324d905dc1f70591ae7d7898d030d7662f0d426e2286f68c9/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f", size = 143892 }, + { url = "https://files.pythonhosted.org/packages/93/62/5e89cdfe04584cb7f4d36003ffa2936681b03ecc0754f8e969c2becb7e24/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a", size = 146123 }, + { url = "https://files.pythonhosted.org/packages/a9/ac/ab729a15c516da2ab70a05f8722ecfccc3f04ed7a18e45c75bbbaa347d61/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a", size = 147943 }, + { url = "https://files.pythonhosted.org/packages/03/d2/3f392f23f042615689456e9a274640c1d2e5dd1d52de36ab8f7955f8f050/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247", size = 142063 }, + { url = "https://files.pythonhosted.org/packages/f2/e3/e20aae5e1039a2cd9b08d9205f52142329f887f8cf70da3650326670bddf/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408", size = 150578 }, + { url = "https://files.pythonhosted.org/packages/8d/af/779ad72a4da0aed925e1139d458adc486e61076d7ecdcc09e610ea8678db/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb", size = 153629 }, + { url = "https://files.pythonhosted.org/packages/c2/b6/7aa450b278e7aa92cf7732140bfd8be21f5f29d5bf334ae987c945276639/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d", size = 150778 }, + { url = "https://files.pythonhosted.org/packages/39/f4/d9f4f712d0951dcbfd42920d3db81b00dd23b6ab520419626f4023334056/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807", size = 146453 }, + { url = "https://files.pythonhosted.org/packages/49/2b/999d0314e4ee0cff3cb83e6bc9aeddd397eeed693edb4facb901eb8fbb69/charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f", size = 95479 }, + { url = 
"https://files.pythonhosted.org/packages/2d/ce/3cbed41cff67e455a386fb5e5dd8906cdda2ed92fbc6297921f2e4419309/charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f", size = 102790 }, + { url = "https://files.pythonhosted.org/packages/72/80/41ef5d5a7935d2d3a773e3eaebf0a9350542f2cab4eac59a7a4741fbbbbe/charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125", size = 194995 }, + { url = "https://files.pythonhosted.org/packages/7a/28/0b9fefa7b8b080ec492110af6d88aa3dea91c464b17d53474b6e9ba5d2c5/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1", size = 139471 }, + { url = "https://files.pythonhosted.org/packages/71/64/d24ab1a997efb06402e3fc07317e94da358e2585165930d9d59ad45fcae2/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3", size = 149831 }, + { url = "https://files.pythonhosted.org/packages/37/ed/be39e5258e198655240db5e19e0b11379163ad7070962d6b0c87ed2c4d39/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd", size = 142335 }, + { url = "https://files.pythonhosted.org/packages/88/83/489e9504711fa05d8dde1574996408026bdbdbd938f23be67deebb5eca92/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00", size = 143862 }, + { url = "https://files.pythonhosted.org/packages/c6/c7/32da20821cf387b759ad24627a9aca289d2822de929b8a41b6241767b461/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12", size = 145673 }, + { url = "https://files.pythonhosted.org/packages/68/85/f4288e96039abdd5aeb5c546fa20a37b50da71b5cf01e75e87f16cd43304/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77", size = 140211 }, + { url = "https://files.pythonhosted.org/packages/28/a3/a42e70d03cbdabc18997baf4f0227c73591a08041c149e710045c281f97b/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146", size = 148039 }, + { url = "https://files.pythonhosted.org/packages/85/e4/65699e8ab3014ecbe6f5c71d1a55d810fb716bbfd74f6283d5c2aa87febf/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd", size = 151939 }, + { url = "https://files.pythonhosted.org/packages/b1/82/8e9fe624cc5374193de6860aba3ea8070f584c8565ee77c168ec13274bd2/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6", size = 149075 }, + { url = "https://files.pythonhosted.org/packages/3d/7b/82865ba54c765560c8433f65e8acb9217cb839a9e32b42af4aa8e945870f/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8", size = 144340 }, + { url = 
"https://files.pythonhosted.org/packages/b5/b6/9674a4b7d4d99a0d2df9b215da766ee682718f88055751e1e5e753c82db0/charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b", size = 95205 }, + { url = "https://files.pythonhosted.org/packages/1e/ab/45b180e175de4402dcf7547e4fb617283bae54ce35c27930a6f35b6bef15/charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76", size = 102441 }, + { url = "https://files.pythonhosted.org/packages/0a/9a/dd1e1cdceb841925b7798369a09279bd1cf183cef0f9ddf15a3a6502ee45/charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545", size = 196105 }, + { url = "https://files.pythonhosted.org/packages/d3/8c/90bfabf8c4809ecb648f39794cf2a84ff2e7d2a6cf159fe68d9a26160467/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7", size = 140404 }, + { url = "https://files.pythonhosted.org/packages/ad/8f/e410d57c721945ea3b4f1a04b74f70ce8fa800d393d72899f0a40526401f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757", size = 150423 }, + { url = "https://files.pythonhosted.org/packages/f0/b8/e6825e25deb691ff98cf5c9072ee0605dc2acfca98af70c2d1b1bc75190d/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa", size = 143184 }, + { url = "https://files.pythonhosted.org/packages/3e/a2/513f6cbe752421f16d969e32f3583762bfd583848b763913ddab8d9bfd4f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d", size = 145268 }, + { url = "https://files.pythonhosted.org/packages/74/94/8a5277664f27c3c438546f3eb53b33f5b19568eb7424736bdc440a88a31f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616", size = 147601 }, + { url = "https://files.pythonhosted.org/packages/7c/5f/6d352c51ee763623a98e31194823518e09bfa48be2a7e8383cf691bbb3d0/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b", size = 141098 }, + { url = "https://files.pythonhosted.org/packages/78/d4/f5704cb629ba5ab16d1d3d741396aec6dc3ca2b67757c45b0599bb010478/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d", size = 149520 }, + { url = "https://files.pythonhosted.org/packages/c5/96/64120b1d02b81785f222b976c0fb79a35875457fa9bb40827678e54d1bc8/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a", size = 152852 }, + { url = "https://files.pythonhosted.org/packages/84/c9/98e3732278a99f47d487fd3468bc60b882920cef29d1fa6ca460a1fdf4e6/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9", size = 150488 }, + { url = 
"https://files.pythonhosted.org/packages/13/0e/9c8d4cb99c98c1007cc11eda969ebfe837bbbd0acdb4736d228ccaabcd22/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1", size = 146192 }, + { url = "https://files.pythonhosted.org/packages/b2/21/2b6b5b860781a0b49427309cb8670785aa543fb2178de875b87b9cc97746/charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35", size = 95550 }, + { url = "https://files.pythonhosted.org/packages/21/5b/1b390b03b1d16c7e382b561c5329f83cc06623916aab983e8ab9239c7d5c/charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f", size = 102785 }, + { url = "https://files.pythonhosted.org/packages/38/94/ce8e6f63d18049672c76d07d119304e1e2d7c6098f0841b51c666e9f44a0/charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda", size = 195698 }, + { url = "https://files.pythonhosted.org/packages/24/2e/dfdd9770664aae179a96561cc6952ff08f9a8cd09a908f259a9dfa063568/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313", size = 140162 }, + { url = "https://files.pythonhosted.org/packages/24/4e/f646b9093cff8fc86f2d60af2de4dc17c759de9d554f130b140ea4738ca6/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9", size = 150263 }, + { url = "https://files.pythonhosted.org/packages/5e/67/2937f8d548c3ef6e2f9aab0f6e21001056f692d43282b165e7c56023e6dd/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b", size = 142966 }, + { url = "https://files.pythonhosted.org/packages/52/ed/b7f4f07de100bdb95c1756d3a4d17b90c1a3c53715c1a476f8738058e0fa/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11", size = 144992 }, + { url = "https://files.pythonhosted.org/packages/96/2c/d49710a6dbcd3776265f4c923bb73ebe83933dfbaa841c5da850fe0fd20b/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f", size = 147162 }, + { url = "https://files.pythonhosted.org/packages/b4/41/35ff1f9a6bd380303dea55e44c4933b4cc3c4850988927d4082ada230273/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd", size = 140972 }, + { url = "https://files.pythonhosted.org/packages/fb/43/c6a0b685fe6910d08ba971f62cd9c3e862a85770395ba5d9cad4fede33ab/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2", size = 149095 }, + { url = "https://files.pythonhosted.org/packages/4c/ff/a9a504662452e2d2878512115638966e75633519ec11f25fca3d2049a94a/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886", size = 152668 }, + { url = 
"https://files.pythonhosted.org/packages/6c/71/189996b6d9a4b932564701628af5cee6716733e9165af1d5e1b285c530ed/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601", size = 150073 }, + { url = "https://files.pythonhosted.org/packages/e4/93/946a86ce20790e11312c87c75ba68d5f6ad2208cfb52b2d6a2c32840d922/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd", size = 145732 }, + { url = "https://files.pythonhosted.org/packages/cd/e5/131d2fb1b0dddafc37be4f3a2fa79aa4c037368be9423061dccadfd90091/charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407", size = 95391 }, + { url = "https://files.pythonhosted.org/packages/27/f2/4f9a69cc7712b9b5ad8fdb87039fd89abba997ad5cbe690d1835d40405b0/charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971", size = 102702 }, + { url = "https://files.pythonhosted.org/packages/7f/c0/b913f8f02836ed9ab32ea643c6fe4d3325c3d8627cf6e78098671cafff86/charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41", size = 197867 }, + { url = "https://files.pythonhosted.org/packages/0f/6c/2bee440303d705b6fb1e2ec789543edec83d32d258299b16eed28aad48e0/charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f", size = 141385 }, + { url = "https://files.pythonhosted.org/packages/3d/04/cb42585f07f6f9fd3219ffb6f37d5a39b4fd2db2355b23683060029c35f7/charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2", size = 151367 }, + { url = "https://files.pythonhosted.org/packages/54/54/2412a5b093acb17f0222de007cc129ec0e0df198b5ad2ce5699355269dfe/charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770", size = 143928 }, + { url = "https://files.pythonhosted.org/packages/5a/6d/e2773862b043dcf8a221342954f375392bb2ce6487bcd9f2c1b34e1d6781/charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4", size = 146203 }, + { url = "https://files.pythonhosted.org/packages/b9/f8/ca440ef60d8f8916022859885f231abb07ada3c347c03d63f283bec32ef5/charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537", size = 148082 }, + { url = "https://files.pythonhosted.org/packages/04/d2/42fd330901aaa4b805a1097856c2edf5095e260a597f65def493f4b8c833/charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496", size = 142053 }, + { url = "https://files.pythonhosted.org/packages/9e/af/3a97a4fa3c53586f1910dadfc916e9c4f35eeada36de4108f5096cb7215f/charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78", size = 150625 }, + { url = 
"https://files.pythonhosted.org/packages/26/ae/23d6041322a3556e4da139663d02fb1b3c59a23ab2e2b56432bd2ad63ded/charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7", size = 153549 }, + { url = "https://files.pythonhosted.org/packages/94/22/b8f2081c6a77cb20d97e57e0b385b481887aa08019d2459dc2858ed64871/charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6", size = 150945 }, + { url = "https://files.pythonhosted.org/packages/c7/0b/c5ec5092747f801b8b093cdf5610e732b809d6cb11f4c51e35fc28d1d389/charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294", size = 146595 }, + { url = "https://files.pythonhosted.org/packages/0c/5a/0b59704c38470df6768aa154cc87b1ac7c9bb687990a1559dc8765e8627e/charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5", size = 95453 }, + { url = "https://files.pythonhosted.org/packages/85/2d/a9790237cb4d01a6d57afadc8573c8b73c609ade20b80f4cda30802009ee/charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765", size = 102811 }, + { url = "https://files.pythonhosted.org/packages/0e/f6/65ecc6878a89bb1c23a086ea335ad4bf21a588990c3f535a227b9eea9108/charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85", size = 49767 }, +] + +[[package]] +name = "click" +version = "8.1.8" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188 }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, +] + +[[package]] +name = "coverage" +version = "7.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/52/d3/3ec80acdd57a0d6a1111b978ade388824f37126446fd6750d38bfaca949c/coverage-7.5.0.tar.gz", hash = "sha256:cf62d17310f34084c59c01e027259076479128d11e4661bb6c9acb38c5e19bb8", size = 798314 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/db/08d54dbc12fdfe5857b06105fd1235bdebb7da7c11cd1a0fae936556162a/coverage-7.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:432949a32c3e3f820af808db1833d6d1631664d53dd3ce487aa25d574e18ad1c", size = 
210025 }, + { url = "https://files.pythonhosted.org/packages/a8/ff/02c4bcff1025b4a788aa3933e1cd1474d79de43e0d859273b3319ef43cd3/coverage-7.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2bd7065249703cbeb6d4ce679c734bef0ee69baa7bff9724361ada04a15b7e3b", size = 210499 }, + { url = "https://files.pythonhosted.org/packages/ab/b1/7820a8ef62adeebd37612af9d2369f4467a3bc2641dea1243450def5489e/coverage-7.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbfe6389c5522b99768a93d89aca52ef92310a96b99782973b9d11e80511f932", size = 238399 }, + { url = "https://files.pythonhosted.org/packages/2c/0e/23a388f3ce16c5ea01a454fef6a9039115abd40b748027d4fef18b3628a7/coverage-7.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:39793731182c4be939b4be0cdecde074b833f6171313cf53481f869937129ed3", size = 236676 }, + { url = "https://files.pythonhosted.org/packages/f8/81/e871b0d58ca5d6cc27d00b2f668ce09c4643ef00512341f3a592a81fb6cd/coverage-7.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85a5dbe1ba1bf38d6c63b6d2c42132d45cbee6d9f0c51b52c59aa4afba057517", size = 237467 }, + { url = "https://files.pythonhosted.org/packages/95/cb/42a6d34d5840635394f1e172aaa0e7cbd9346155e5004a8ee75d8e434c6b/coverage-7.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:357754dcdfd811462a725e7501a9b4556388e8ecf66e79df6f4b988fa3d0b39a", size = 243539 }, + { url = "https://files.pythonhosted.org/packages/6a/6a/18b3819919fdfd3e2062a75219b363f895f24ae5b80e72ffe5dfb1a7e9c8/coverage-7.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a81eb64feded34f40c8986869a2f764f0fe2db58c0530d3a4afbcde50f314880", size = 241725 }, + { url = "https://files.pythonhosted.org/packages/b5/3d/a0650978e8b8f78d269358421b7401acaf7cb89e957b2e1be5205ea5940e/coverage-7.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:51431d0abbed3a868e967f8257c5faf283d41ec882f58413cf295a389bb22e58", size = 242913 }, + { url = "https://files.pythonhosted.org/packages/8a/fe/95a74158fa0eda56d39783e918edc6fbb3dd3336be390557fc0a2815ecd4/coverage-7.5.0-cp310-cp310-win32.whl", hash = "sha256:f609ebcb0242d84b7adeee2b06c11a2ddaec5464d21888b2c8255f5fd6a98ae4", size = 212381 }, + { url = "https://files.pythonhosted.org/packages/4c/26/b276e0c70cba5059becce2594a268a2731d5b4f2386e9a6afdf37ffa3d44/coverage-7.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:6782cd6216fab5a83216cc39f13ebe30adfac2fa72688c5a4d8d180cd52e8f6a", size = 213225 }, + { url = "https://files.pythonhosted.org/packages/71/cf/964bb667ea37d64b25f04d4cfaf6232cdb7a6472e1f4a4faf0459ddcec40/coverage-7.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e768d870801f68c74c2b669fc909839660180c366501d4cc4b87efd6b0eee375", size = 210130 }, + { url = "https://files.pythonhosted.org/packages/aa/56/31edd4baa132fe2b991437e0acf3e36c50418370044a89b65518e5581f4c/coverage-7.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:84921b10aeb2dd453247fd10de22907984eaf80901b578a5cf0bb1e279a587cb", size = 210617 }, + { url = "https://files.pythonhosted.org/packages/26/6d/4cd14bd0221180c307fae4f8ef00dbd86a13507c25081858c620aa6fafd8/coverage-7.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:710c62b6e35a9a766b99b15cdc56d5aeda0914edae8bb467e9c355f75d14ee95", size = 242048 }, + { url = 
"https://files.pythonhosted.org/packages/84/60/7eb84255bd9947b140e0382721b0a1b25fd670b4f0f176f11f90b5632d02/coverage-7.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c379cdd3efc0658e652a14112d51a7668f6bfca7445c5a10dee7eabecabba19d", size = 239619 }, + { url = "https://files.pythonhosted.org/packages/76/6b/e8f4696194fdf3c19422f2a80ac10e03a9322f93e6c9ef57a89e03a8c8f7/coverage-7.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fea9d3ca80bcf17edb2c08a4704259dadac196fe5e9274067e7a20511fad1743", size = 241321 }, + { url = "https://files.pythonhosted.org/packages/3f/1c/6a6990fd2e6890807775852882b1ed0a8e50519a525252490b0c219aa8a5/coverage-7.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:41327143c5b1d715f5f98a397608f90ab9ebba606ae4e6f3389c2145410c52b1", size = 250419 }, + { url = "https://files.pythonhosted.org/packages/1a/be/b6422a1422381704dd015cc23e503acd1a44a6bdc4e59c75f8c6a2b24151/coverage-7.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:565b2e82d0968c977e0b0f7cbf25fd06d78d4856289abc79694c8edcce6eb2de", size = 248794 }, + { url = "https://files.pythonhosted.org/packages/9b/93/e8231000754d4a31fe9a6c550f6a436eacd2e50763ba2b418f10b2308e45/coverage-7.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cf3539007202ebfe03923128fedfdd245db5860a36810136ad95a564a2fdffff", size = 249873 }, + { url = "https://files.pythonhosted.org/packages/d3/6f/eb5aae80bf9d01d0f293121d4caa660ac968da2cb967f82547a7b5e8d65b/coverage-7.5.0-cp311-cp311-win32.whl", hash = "sha256:bf0b4b8d9caa8d64df838e0f8dcf68fb570c5733b726d1494b87f3da85db3a2d", size = 212380 }, + { url = "https://files.pythonhosted.org/packages/30/73/b70ab57f11b62f5ca9a83f43cae752fbbb4417bea651875235c32eb2fc2e/coverage-7.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:9c6384cc90e37cfb60435bbbe0488444e54b98700f727f16f64d8bfda0b84656", size = 213316 }, + { url = "https://files.pythonhosted.org/packages/36/db/f4e17ffb5ac2d125c72ee3b235c2e04f85a4296a6a9e17730e218af113d8/coverage-7.5.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fed7a72d54bd52f4aeb6c6e951f363903bd7d70bc1cad64dd1f087980d309ab9", size = 210340 }, + { url = "https://files.pythonhosted.org/packages/c3/bc/d7e832280f269be9e8d46cff5c4031b4840f1844674dc53ad93c5a9c1da6/coverage-7.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cbe6581fcff7c8e262eb574244f81f5faaea539e712a058e6707a9d272fe5b64", size = 210612 }, + { url = "https://files.pythonhosted.org/packages/54/84/543e2cd6c1de30c7522a0afcb040677957bac756dd8677bade8bdd9274ba/coverage-7.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad97ec0da94b378e593ef532b980c15e377df9b9608c7c6da3506953182398af", size = 242926 }, + { url = "https://files.pythonhosted.org/packages/ad/06/570533f747141b4fd727a193317e16c6e677ed7945e23a195b8f64e685a2/coverage-7.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd4bacd62aa2f1a1627352fe68885d6ee694bdaebb16038b6e680f2924a9b2cc", size = 240294 }, + { url = "https://files.pythonhosted.org/packages/fa/d9/ec4ba0913195d240d026670d41b91f3e5b9a8a143a385f93a09e97c90f5c/coverage-7.5.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:adf032b6c105881f9d77fa17d9eebe0ad1f9bfb2ad25777811f97c5362aa07f2", size = 242232 }, + { url = 
"https://files.pythonhosted.org/packages/d9/3f/1a613c32aa1980d20d6ca2f54faf800df04aafad6016d7132b3276d8715d/coverage-7.5.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4ba01d9ba112b55bfa4b24808ec431197bb34f09f66f7cb4fd0258ff9d3711b1", size = 249171 }, + { url = "https://files.pythonhosted.org/packages/b9/3b/e16b12693572fd69148453abc6ddcd20cbeae6f0a040b5ed6af2f75b646f/coverage-7.5.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f0bfe42523893c188e9616d853c47685e1c575fe25f737adf473d0405dcfa7eb", size = 247073 }, + { url = "https://files.pythonhosted.org/packages/e7/3e/04a05d40bb09f90a312296a32fb2c5ade2dfcf803edf777ad18b97547503/coverage-7.5.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a9a7ef30a1b02547c1b23fa9a5564f03c9982fc71eb2ecb7f98c96d7a0db5cf2", size = 248812 }, + { url = "https://files.pythonhosted.org/packages/ba/f7/3a8b7b0affe548227f3d45e248c0f22c5b55bff0ee062b49afc165b3ff25/coverage-7.5.0-cp312-cp312-win32.whl", hash = "sha256:3c2b77f295edb9fcdb6a250f83e6481c679335ca7e6e4a955e4290350f2d22a4", size = 212634 }, + { url = "https://files.pythonhosted.org/packages/7c/31/5f5286d2a5e21e1fe5670629bb24c79bf46383a092e74e00077e7a178e5c/coverage-7.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:427e1e627b0963ac02d7c8730ca6d935df10280d230508c0ba059505e9233475", size = 213460 }, + { url = "https://files.pythonhosted.org/packages/62/18/5573216d5b8db7d9f29189350dcd81830a03a624966c35f8201ae10df09c/coverage-7.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d0194d654e360b3e6cc9b774e83235bae6b9b2cac3be09040880bb0e8a88f4a1", size = 210014 }, + { url = "https://files.pythonhosted.org/packages/7c/0e/e98d6c6d569d65ff3195f095e6b006b3d7780fd6182322a25e7dfe0d53d3/coverage-7.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:33c020d3322662e74bc507fb11488773a96894aa82a622c35a5a28673c0c26f5", size = 210494 }, + { url = "https://files.pythonhosted.org/packages/d3/63/98e5a6b7ed1bfca874729ee309cc49a6d6658ab9e479a2b6d223ccc96e03/coverage-7.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbdf2cae14a06827bec50bd58e49249452d211d9caddd8bd80e35b53cb04631", size = 237996 }, + { url = "https://files.pythonhosted.org/packages/76/e4/d3c67a0a092127b8a3dffa2f75334a8cdb2cefc99e3d75a7f42cf1ff98a9/coverage-7.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3235d7c781232e525b0761730e052388a01548bd7f67d0067a253887c6e8df46", size = 236287 }, + { url = "https://files.pythonhosted.org/packages/12/7f/9b787ffc31bc39aa9e98c7005b698e7c6539bd222043e4a9c83b83c782a2/coverage-7.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2de4e546f0ec4b2787d625e0b16b78e99c3e21bc1722b4977c0dddf11ca84e", size = 237070 }, + { url = "https://files.pythonhosted.org/packages/31/ee/9998a0d855cad5f8e04062f7428b83c34aa643e5df468409593a480d5585/coverage-7.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4d0e206259b73af35c4ec1319fd04003776e11e859936658cb6ceffdeba0f5be", size = 243115 }, + { url = "https://files.pythonhosted.org/packages/16/94/1e348cd4445404c588ec8199adde0b45727b1d7989d8fb097d39c93e3da5/coverage-7.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2055c4fb9a6ff624253d432aa471a37202cd8f458c033d6d989be4499aed037b", size = 241315 }, + { url = "https://files.pythonhosted.org/packages/28/17/6fe1695d2a706e586b87a407598f4ed82dd218b2b43cdc790f695f259849/coverage-7.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:075299460948cd12722a970c7eae43d25d37989da682997687b34ae6b87c0ef0", size = 242467 }, + { url = "https://files.pythonhosted.org/packages/81/a2/1e550272c8b1f89b980504230b1a929de83d8f3d5ecb268477b32e5996a6/coverage-7.5.0-cp39-cp39-win32.whl", hash = "sha256:280132aada3bc2f0fac939a5771db4fbb84f245cb35b94fae4994d4c1f80dae7", size = 212394 }, + { url = "https://files.pythonhosted.org/packages/c9/48/7d3c31064c5adcc743fe5370cf7e198cee06cc0e2d37b5cbe930691a3f54/coverage-7.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:c58536f6892559e030e6924896a44098bc1290663ea12532c78cef71d0df8493", size = 213246 }, + { url = "https://files.pythonhosted.org/packages/34/81/f00ce7ef95479085feb01fa9e352b2b5b2b9d24767acf2266d6267a6dba9/coverage-7.5.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:2b57780b51084d5223eee7b59f0d4911c31c16ee5aa12737c7a02455829ff067", size = 202381 }, +] + +[package.optional-dependencies] +toml = [ + { name = "tomli", marker = "python_full_version <= '3.11'" }, +] + +[[package]] +name = "cramjam" +version = "2.9.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c9/68/09b6b5603d21a0c7d4362d513217a5079c47b1b7a88967c52dbef13db183/cramjam-2.9.1.tar.gz", hash = "sha256:336cc591d86cbd225d256813779f46624f857bc9c779db126271eff9ddc524ae", size = 47892 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/27/5d/0b03115fa6a95a6dd9be344cd186879b763f1a6fab57ae55ffe2777aa0a7/cramjam-2.9.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:8e82464d1e00fbbb12958999b8471ba5e9f3d9711954505a0a7b378762332e6f", size = 2136622 }, + { url = "https://files.pythonhosted.org/packages/6f/ac/a17644e182ede7e8e24fb3af038bc2c1cf3dd0447c935cb10409f21d099b/cramjam-2.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d2df8a6511cc08ef1fccd2e0c65e2ebc9f57574ec8376052a76851af5398810", size = 1927947 }, + { url = "https://files.pythonhosted.org/packages/9e/1e/e6c4f9695e4ba7b9c63160dcbfa76428bd3221930eedeb8f16364ab6f642/cramjam-2.9.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:21ea784e6c3f1843d3523ae0f03651dd06058b39eeb64beb82ee3b100fa83662", size = 2268766 }, + { url = "https://files.pythonhosted.org/packages/ab/37/4c81e5d039bdfc75a695abd426e6cdd9ab18a87f65d57837d78936cfa226/cramjam-2.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e0c5d98a4e791f0bbd0ffcb7dae879baeb2dcc357348a8dc2be0a8c10403a2a", size = 2108762 }, + { url = "https://files.pythonhosted.org/packages/b9/bb/3bf3a8877b9a4105b625d710410bd2bc83ef38d4a7fe4eaeb3895d997b2d/cramjam-2.9.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e076fd87089197cb61117c63dbe7712ad5eccb93968860eb3bae09b767bac813", size = 2086694 }, + { url = "https://files.pythonhosted.org/packages/c3/78/317b7ab6a9b0f24c45d56305a8288cdb6408f855034dc80530ed16a5cc6c/cramjam-2.9.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6d86b44933aea0151e4a2e1e6935448499849045c38167d288ca4c59d5b8cd4e", size = 2441698 }, + { url = "https://files.pythonhosted.org/packages/c5/2d/bc98992c29eb8647196b3bda814fd7ecfba6aff85177d44180be2aa320e8/cramjam-2.9.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7eb032549dec897b942ddcf80c1cdccbcb40629f15fc902731dbe6362da49326", size = 2759280 }, + { url = "https://files.pythonhosted.org/packages/dd/64/a4e54d74110c22477e467586935167d61fc7bae5284d393e76779b214a3e/cramjam-2.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:cf29b4def86ec503e329fe138842a9b79a997e3beb6c7809b05665a0d291edff", size = 2385128 }, + { url = "https://files.pythonhosted.org/packages/b0/1a/6ee093bf8a41cf31980175310abbbcdd1a39dadadbe96843112f42cef0fe/cramjam-2.9.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a36adf7d13b7accfa206e1c917f08924eb905b45aa8e62176509afa7b14db71e", size = 2373494 }, + { url = "https://files.pythonhosted.org/packages/9d/a6/1ae1f1a8ef559c2fab9d6d7f09b19995684e6727e617bf1b73967ee1c6be/cramjam-2.9.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:cf4ea758d98b6fad1b4b2d808d0de690d3162ac56c26968aea0af6524e3eb736", size = 2386900 }, + { url = "https://files.pythonhosted.org/packages/d9/e6/cf18deeaa0a96e7fc87f0eacde3c97e2893b573ac148ec746655570c18fc/cramjam-2.9.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:4826d6d81ea490fa7a3ae7a4b9729866a945ffac1f77fe57b71e49d6e1b21efd", size = 2400609 }, + { url = "https://files.pythonhosted.org/packages/90/97/98a8fa24249dc72a936a9a51a81407a399070ba4ceb528d0af291c760eff/cramjam-2.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:335103317475bf992953c58838152a4761fc3c87354000edbfc4d7e57cf05909", size = 2553159 }, + { url = "https://files.pythonhosted.org/packages/ae/6b/4f71f72bc3405f221ec8bd2ba869e324d5f87ddd58c14bf59f7937ea37ab/cramjam-2.9.1-cp310-cp310-win32.whl", hash = "sha256:258120cb1e3afc3443f756f9de161ed63eed56a2c31f6093e81c571c0f2dc9f6", size = 1817873 }, + { url = "https://files.pythonhosted.org/packages/8e/f4/32639916897d59e94d286b5b22263ce8c2903ecc93a868ebe9443ece8f12/cramjam-2.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:c60e5996aa02547d12bc2740d44e90e006b0f93100f53206f7abe6732ad56e69", size = 2092168 }, + { url = "https://files.pythonhosted.org/packages/6c/28/dd2b62be30ffe1fa8df10c99ba7b46abfbfb2fc6ace6acbbf9264a1a6b48/cramjam-2.9.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:b9db1debe48060e41a5b91af9193c524e473c57f6105462c5524a41f5aabdb88", size = 2136699 }, + { url = "https://files.pythonhosted.org/packages/03/c9/fcebeb6f06879af4226337715fbc42ffe543158bcba8c244bba144767897/cramjam-2.9.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f6f18f0242212d3409d26ce3874937b5b979cebd61f08b633a6ea893c32fc7b6", size = 1927934 }, + { url = "https://files.pythonhosted.org/packages/e8/f3/77032e4f5db4dfcc2b0365f92655b7d6f3fc1527ea5b637f9fb9f8156a65/cramjam-2.9.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:b5b1cd7d39242b2b903cf09cd4696b3a6e04dc537ffa9f3ac8668edae76eecb6", size = 2268584 }, + { url = "https://files.pythonhosted.org/packages/38/16/52175e94390f57196382783a3386c122ace7656b57339abaacdc9433b609/cramjam-2.9.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a47de0a68f5f4d9951250ef5af31f2a7228132caa9ed60994234f7eb98090d33", size = 2108599 }, + { url = "https://files.pythonhosted.org/packages/99/25/5f7476d127a8d18cd19a2f3fd25c0fe09ef7848069d23aac70bc96385eb6/cramjam-2.9.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e13c9a697881e5e38148958612dc6856967f5ff8cd7bba5ff751f2d6ac020aa4", size = 2086632 }, + { url = "https://files.pythonhosted.org/packages/7b/97/76ff3e1209add6acb7e2aa7997be48dc1f92ad66ee3e8fa1179eb2bb9b44/cramjam-2.9.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba560244bc1335b420b74e91e35f9d4e7f307a3be3a4603ce0f0d7e15a0acdf0", size = 2441757 }, + { url = 
"https://files.pythonhosted.org/packages/69/c4/228e74c30576556d11e54d86f356955cd86ff5e11bbfec74b66ed0dd237d/cramjam-2.9.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d47fd41ce260cf4f0ff0e788de961fab9e9c6844a05ce55d06ce31e06107bdc", size = 2758144 }, + { url = "https://files.pythonhosted.org/packages/4b/e7/0fd22e12c6a2879abc501979779d4b8cfe8fe692c708c2c0d1664e88fd79/cramjam-2.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84d154fbadece82935396eb6bcb502085d944d2fd13b07a94348364344370c2c", size = 2385062 }, + { url = "https://files.pythonhosted.org/packages/dd/9c/845592ddf9eb7130ae8bc5958a01d469304a43f8071effe164e2d239e3fa/cramjam-2.9.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:038df668ffb94d64d67b6ecc59cbd206745a425ffc0402897dde12d89fa6a870", size = 2373473 }, + { url = "https://files.pythonhosted.org/packages/10/c2/287cc94b7f8e87e3b0c21819d3a5deead99ebfdcb2b2d85cd04011b37292/cramjam-2.9.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:4125d8cd86fa08495d310e80926c2f0563f157b76862e7479f9b2cf94823ea0c", size = 2386816 }, + { url = "https://files.pythonhosted.org/packages/7c/22/869a1eeea53db4d9fbde6693a2465909762bffeab1a671e193c95b26f99f/cramjam-2.9.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4206ebdd1d1ef0f3f86c8c2f7c426aa4af6094f4f41e274601fd4c4569f37454", size = 2400713 }, + { url = "https://files.pythonhosted.org/packages/3f/89/ff988bd6427f01041ccb1a9104c05b6373ae476682d317b6844f4b40af92/cramjam-2.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ab687bef5c493732b9a4ab870542ee43f5eae0025f9c684c7cb399c3a85cb380", size = 2553081 }, + { url = "https://files.pythonhosted.org/packages/2e/68/13fa8561335de609f3cd40b132c1a3abbaf26d3c277e8b8a7446de34ef2c/cramjam-2.9.1-cp311-cp311-win32.whl", hash = "sha256:dda7698b6d7caeae1047adafebc4b43b2a82478234f6c2b45bc3edad854e0600", size = 1817782 }, + { url = "https://files.pythonhosted.org/packages/94/75/f3506ee802460e3b86a91e53bba1f67cf457fa04e4316fe7d5823ba5d28b/cramjam-2.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:872b00ff83e84bcbdc7e951af291ebe65eed20b09c47e7c4af21c312f90b796f", size = 2092227 }, + { url = "https://files.pythonhosted.org/packages/56/66/69a1c17331e38b02c78c923262fc315272de7c2618ef7eac8b3358969d90/cramjam-2.9.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:79417957972553502b217a0093532e48893c8b4ca30ccc941cefe9c72379df7c", size = 2132273 }, + { url = "https://files.pythonhosted.org/packages/3d/17/23d0b1d3301480e924545cdd27f2b949c50438949f64c74e800a09c12c37/cramjam-2.9.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce2b94117f373defc876f88e74e44049a9969223dbca3240415b71752d0422fb", size = 1926919 }, + { url = "https://files.pythonhosted.org/packages/8e/da/e9565f4abbbaa14645ccd7ce83f9631e90955454b87dc3ef9208aebc72e6/cramjam-2.9.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:67040e0fd84404885ec716a806bee6110f9960c3647e0ef1670aab3b7375a70a", size = 2271776 }, + { url = "https://files.pythonhosted.org/packages/88/ac/e6e0794ac01deb52e7a6a3e59720699abdee08d9b9c63a8d8874201d8155/cramjam-2.9.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bedb84e068b53c944bd08dcb501fd00d67daa8a917922356dd559b484ce7eab", size = 2109248 }, + { url = "https://files.pythonhosted.org/packages/22/0f/c3724b2dcdfbe7e07917803cf7a6db4a874818a6f8d2b95ca1ceaf177170/cramjam-2.9.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:06e3f97a379386d97debf08638a78b3d3850fdf6124755eb270b54905a169930", size = 2088611 }, + { url = "https://files.pythonhosted.org/packages/ce/16/929a5ae899ad6298f58e66622dc223476fe8e1d4e8dae608f4e1a34bfd09/cramjam-2.9.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11118675e9c7952ececabc62f023290ee4f8ecf0bee0d2c7eb8d1c402ee9769d", size = 2438373 }, + { url = "https://files.pythonhosted.org/packages/2a/2a/ad473f1ca65d3285e8c1d99fc0289f5856224c0d452dabcf856fd4dcdd77/cramjam-2.9.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6b7de6b61b11545570e4d6033713f3599525efc615ee353a822be8f6b0c65b77", size = 2836669 }, + { url = "https://files.pythonhosted.org/packages/9b/5a/e9b4868ee27099a2a21646cf5ea5cf08c660eae90b55a395ada974dcf3fb/cramjam-2.9.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57ca8f3775324a9de3ee6f05ca172687ba258c0dea79f7e3a6b4112834982f2a", size = 2343995 }, + { url = "https://files.pythonhosted.org/packages/5f/c4/870a9b4524107bf85a207b82a42613318881238b20f2d237e62815af646a/cramjam-2.9.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9847dd6f288f1c56359f52acb48ff2df848ff3e3bff34d23855bbcf7016427cc", size = 2374270 }, + { url = "https://files.pythonhosted.org/packages/70/4b/b69e8e3951b7cec5e7da2539b7573bb396bed66af07d760b1878b00fd120/cramjam-2.9.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:8d1248dfa7f151e893ce819670f00879e4b7650b8d4c01279ce4f12140d68dd2", size = 2388789 }, + { url = "https://files.pythonhosted.org/packages/05/1a/af02f6192060413314735c0db61259d7279b0d8d99eee29eff2af09c5892/cramjam-2.9.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9da6d970281083bae91b914362de325414aa03c01fc806f6bb2cc006322ec834", size = 2402459 }, + { url = "https://files.pythonhosted.org/packages/20/9a/a4ab3e90d72eb4f2c1b983fa32b4050ba676f533ba15bd78158f0632295a/cramjam-2.9.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1c33bc095db5733c841a102b8693062be5db8cdac17b9782ebc00577c6a94480", size = 2518440 }, + { url = "https://files.pythonhosted.org/packages/35/3b/e632dd7e2c5c8a2af2d83144b00d6840f1afcf9c6959ed59ec5b0f925288/cramjam-2.9.1-cp312-cp312-win32.whl", hash = "sha256:9e9193cd4bb57e7acd3af24891526299244bfed88168945efdaa09af4e50720f", size = 1822630 }, + { url = "https://files.pythonhosted.org/packages/0e/a2/d1c46618b81b83578d58a62f3709046c4f3b4ddba10df4b9797cfe096b98/cramjam-2.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:15955dd75e80f66c1ea271167a5347661d9bdc365f894a57698c383c9b7d465c", size = 2094684 }, + { url = "https://files.pythonhosted.org/packages/85/45/f1d1e6ffdceb3b0c18511df2f8e779e03972459fb71d7c1ab0f6a5c063a3/cramjam-2.9.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:5a7797a2fff994fc5e323f7a967a35a3e37e3006ed21d64dcded086502f482af", size = 2131814 }, + { url = "https://files.pythonhosted.org/packages/3a/96/36bbd431fbf0fa2ff51fd2db4c3bead66e9e373693a8455d411d45125a68/cramjam-2.9.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d51b9b140b1df39a44bff7896d98a10da345b7d5f5ce92368d328c1c2c829167", size = 1926380 }, + { url = "https://files.pythonhosted.org/packages/67/c4/99b6507ec697d5f56d32c9c04614775004b05b7fa870725a492dc6b639eb/cramjam-2.9.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:07ac76b7f992556e7aa910244be11ece578cdf84f4d5d5297461f9a895e18312", size = 2271581 }, + { url = 
"https://files.pythonhosted.org/packages/cb/1b/6d55dff244fb22c0b686dd5a96a754c0638f8a94056beb27c457c6035cc5/cramjam-2.9.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d90a72608c7550cd7eba914668f6277bfb0b24f074d1f1bd9d061fcb6f2adbd6", size = 2109255 }, + { url = "https://files.pythonhosted.org/packages/ca/fb/b9fcf492a21a8d978c6f999025fce2c6656399448c017ed2fc859425f37f/cramjam-2.9.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:56495975401b1821dbe1f29cf222e23556232209a2fdb809fe8156d120ca9c7f", size = 2088323 }, + { url = "https://files.pythonhosted.org/packages/88/1f/69b523395aeaa201dbd53d203453288205a0c651e7c910161892d694eb4d/cramjam-2.9.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b695259e71fde6d5be66b77a4474523ced9ffe9fe8a34cb9b520ec1241a14d3", size = 2437930 }, + { url = "https://files.pythonhosted.org/packages/b0/2c/d07e802f1786c4082e8286db1087563e4fab31cd6534ed31523f1f9584d1/cramjam-2.9.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ab1e69dc4831bbb79b6d547077aae89074c83e8ad94eba1a3d80e94d2424fd02", size = 2836655 }, + { url = "https://files.pythonhosted.org/packages/1f/f5/6b425e82395c078bc95a7437b685e6bdba39d28c2b2986d79374fc1681aa/cramjam-2.9.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:440b489902bfb7a26d3fec1ca888007615336ff763d2a32a2fc40586548a0dbf", size = 2387107 }, + { url = "https://files.pythonhosted.org/packages/33/65/7bf97d89ba7607aaea5464af6f249e3d94c291acf73d72768367a3e361c0/cramjam-2.9.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:217fe22b41f8c3dce03852f828b059abfad11d1344a1df2f43d3eb8634b18d75", size = 2374006 }, + { url = "https://files.pythonhosted.org/packages/29/11/8b6c82eda6d0affbc15d7ab4dc758856eb4308e8ddae73300c1648f5aa0f/cramjam-2.9.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:95f3646ddc98af25af25d5692ae65966488a283813336ea9cf41b22e542e7c0d", size = 2388731 }, + { url = "https://files.pythonhosted.org/packages/48/25/6cdd57c0b1a83c98aec9029310d09a6c1a31e9e9fb8efd9001bd0cbea992/cramjam-2.9.1-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:6b19fc60ead1cae9795a5b359599da3a1c95d38f869bdfb51c441fd76b04e926", size = 2402131 }, + { url = "https://files.pythonhosted.org/packages/b4/e7/cbf80c9647fa582432aa833c4bdd20cf437917c8066ce653e3b78deff658/cramjam-2.9.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:8dc5207567459d049696f62a1fdfb220f3fe6aa0d722285d44753e12504dac6c", size = 2555296 }, + { url = "https://files.pythonhosted.org/packages/18/a6/fabe1959a980f5d2783a6c138311509dd168bd76e62018624a91cd1cbb41/cramjam-2.9.1-cp313-cp313-win32.whl", hash = "sha256:fbfe35929a61b914de9e5dbacde0cfbba86cbf5122f9285a24c14ed0b645490b", size = 1822484 }, + { url = "https://files.pythonhosted.org/packages/55/d5/24e4562771711711c466768c92097640ed97b0283abe9043ffb6c6d4cf04/cramjam-2.9.1-cp313-cp313-win_amd64.whl", hash = "sha256:06068bd191a82ad4fc1ac23d6f8627fb5e37ec4be0431711b9a2dbacaccfeddb", size = 2094445 }, + { url = "https://files.pythonhosted.org/packages/c7/5a/50523fd478390acb6ca8e57239f7cf79f7260dc0d16be89137d47823e50a/cramjam-2.9.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:af39006faddfc6253beb93ca821d544931cfee7f0177b99ff106dfd8fd6a2cd8", size = 2137158 }, + { url = "https://files.pythonhosted.org/packages/df/83/54eca302e431d51149074d8aad6ec588870c5797060e2142dfe6ca3599a8/cramjam-2.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:b3291be0d3f73d5774d69013be4ab33978c777363b5312d14f62f77817c2f75a", size = 1927910 }, + { url = "https://files.pythonhosted.org/packages/6d/e9/5d38ffa5376c5bffcbd16545707d9dac6beffccd00410f0cc19d83d85ef7/cramjam-2.9.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1539fd758f0e57fad7913cebff8baaee871bb561ddf6fa710a427b74da6b6778", size = 2269458 }, + { url = "https://files.pythonhosted.org/packages/15/f3/99fedc4210db1967256e602fdcb60947585421fd659f8baeeeb4ea16e4c7/cramjam-2.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff362f68bd68ac0eccb445209238d589bba728fb6d7f2e9dc199e0ec3a61d6e0", size = 2109406 }, + { url = "https://files.pythonhosted.org/packages/f2/e9/f380e0c1bd03046c522da4fd6d43ea897ba0b832c78fc4ea5708d8c35c21/cramjam-2.9.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:23b9786d1d17686fb8d600ade2a19374c7188d4b8867efa9af0d8274a220aec7", size = 2086677 }, + { url = "https://files.pythonhosted.org/packages/13/a7/3ae887753f6d41f6e4af8e25654d103c56e13dda2f4b4d13acac570c65c1/cramjam-2.9.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8bc9c2c748aaf91863d89c4583f529c1c709485c94f8dfeb3ee48662d88e3258", size = 2442136 }, + { url = "https://files.pythonhosted.org/packages/de/a2/763fd98340936057e44ea0b870c9cdb87ad5f90d49e492e8a11cf74e7b29/cramjam-2.9.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd0fa9a0e7f18224b6d2d1d69dbdc3aecec80ef1393c59244159b131604a4395", size = 2754985 }, + { url = "https://files.pythonhosted.org/packages/33/31/7c8cdf6b16fcd46bad4a307c8203a58b7a2fddf6cb3aad9dc441c050f62f/cramjam-2.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ceef6e09ee22457997370882aa3c69de01e6dd0aaa2f953e1e87ad11641d042", size = 2385597 }, + { url = "https://files.pythonhosted.org/packages/dd/ba/ec0f3b5a3a90721bdb42f4f4989b60adf823d137f40365e83df0cd299378/cramjam-2.9.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1376f6fdbf0b30712413a0b4e51663a4938ae2f6b449f8e4635dbb3694db83cf", size = 2374339 }, + { url = "https://files.pythonhosted.org/packages/ff/0a/f5bccdc8d12821aed4473a427e9eb8282a38c9337a30e02ed102b18941bf/cramjam-2.9.1-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:342fb946f8d3e9e35b837288b03ab23cfbe0bb5a30e582ed805ef79706823a96", size = 2386933 }, + { url = "https://files.pythonhosted.org/packages/a0/6e/ce3ffad2b3b8cb73156a19345e27a2e27fb5be79b64f2c81b0c6d6e16c57/cramjam-2.9.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a237064a6e2c2256c9a1cf2beb7c971382190c0f1eb2e810e02e971881756132", size = 2400860 }, + { url = "https://files.pythonhosted.org/packages/32/a9/e4509e5dfc8f41d9e7f9fdddbf567967937303621d410197c86b11d6a3e4/cramjam-2.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53145fc9f2319c1245d4329e1da8cfacd6e35e27090c07c0b9d453ae2bbdac3e", size = 2553681 }, + { url = "https://files.pythonhosted.org/packages/0a/83/52401c5c654ddff2850d890b0f1cfc355ff6887c6def420d0c8d8178ff97/cramjam-2.9.1-cp39-cp39-win32.whl", hash = "sha256:8a9f52c27292c21457f43c4ce124939302a9acfb62295e7cda8667310563a5a3", size = 1818130 }, + { url = "https://files.pythonhosted.org/packages/93/b3/1645986d8b915fd0426a7224cd00c2c17c32b4d69bc5faad3fb3f5fd5081/cramjam-2.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:8097ee39b61c86848a443c0b25b2df1de6b331fd512b20836a4f5cfde51ab255", size = 2092440 }, + { url = 
"https://files.pythonhosted.org/packages/bc/91/3f7884172573072a4280bc8bc19b7562b2cd66d2a65576b11e72115cd5fe/cramjam-2.9.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:86824c695688fcd06c5ac9bbd3fea9bdfb4cca194b1e706fbf11a629df48d2b4", size = 2159537 }, + { url = "https://files.pythonhosted.org/packages/ef/49/a0a89e9c45413e89a1e408d4ab416c0f88f19f6db7571fd5c517e429e276/cramjam-2.9.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:27571bfa5a5d618604696747d0dc1d2a99b5906c967c8dee53c13a7107edfde6", size = 1936244 }, + { url = "https://files.pythonhosted.org/packages/26/f7/6422b9e4d148f1a351c0358a95d59023f25cab76609b180804f6a3ed17e9/cramjam-2.9.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb01f6e38719818778144d3165a89ea1ad9dc58c6342b7f20aa194c70f34cbd1", size = 2119487 }, + { url = "https://files.pythonhosted.org/packages/b5/59/6fc930217f7ae085eca6d22d3477cd0145a105cdc39e63b834cb0c1b25e3/cramjam-2.9.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b5cef5cf40725fe64592af9ec163e7389855077700678a1d94bec549403a74d", size = 2400910 }, + { url = "https://files.pythonhosted.org/packages/2d/36/7e53cf5aaed4b446490e298f7571e69ce15d0dfb148feabe8bf02e58827f/cramjam-2.9.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ac48b978aa0675f62b642750e798c394a64d25ce852e4e541f69bef9a564c2f0", size = 2100860 }, +] + +[[package]] +name = "cryptography" +version = "44.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/91/4c/45dfa6829acffa344e3967d6006ee4ae8be57af746ae2eba1c431949b32c/cryptography-44.0.0.tar.gz", hash = "sha256:cd4e834f340b4293430701e772ec543b0fbe6c2dea510a5286fe0acabe153a02", size = 710657 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/55/09/8cc67f9b84730ad330b3b72cf867150744bf07ff113cda21a15a1c6d2c7c/cryptography-44.0.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:84111ad4ff3f6253820e6d3e58be2cc2a00adb29335d4cacb5ab4d4d34f2a123", size = 6541833 }, + { url = "https://files.pythonhosted.org/packages/7e/5b/3759e30a103144e29632e7cb72aec28cedc79e514b2ea8896bb17163c19b/cryptography-44.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b15492a11f9e1b62ba9d73c210e2416724633167de94607ec6069ef724fad092", size = 3922710 }, + { url = "https://files.pythonhosted.org/packages/5f/58/3b14bf39f1a0cfd679e753e8647ada56cddbf5acebffe7db90e184c76168/cryptography-44.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:831c3c4d0774e488fdc83a1923b49b9957d33287de923d58ebd3cec47a0ae43f", size = 4137546 }, + { url = "https://files.pythonhosted.org/packages/98/65/13d9e76ca19b0ba5603d71ac8424b5694415b348e719db277b5edc985ff5/cryptography-44.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:761817a3377ef15ac23cd7834715081791d4ec77f9297ee694ca1ee9c2c7e5eb", size = 3915420 }, + { url = "https://files.pythonhosted.org/packages/b1/07/40fe09ce96b91fc9276a9ad272832ead0fddedcba87f1190372af8e3039c/cryptography-44.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3c672a53c0fb4725a29c303be906d3c1fa99c32f58abe008a82705f9ee96f40b", size = 4154498 }, + { url = "https://files.pythonhosted.org/packages/75/ea/af65619c800ec0a7e4034207aec543acdf248d9bffba0533342d1bd435e1/cryptography-44.0.0-cp37-abi3-manylinux_2_34_aarch64.whl", hash = 
"sha256:4ac4c9f37eba52cb6fbeaf5b59c152ea976726b865bd4cf87883a7e7006cc543", size = 3932569 }, + { url = "https://files.pythonhosted.org/packages/c7/af/d1deb0c04d59612e3d5e54203159e284d3e7a6921e565bb0eeb6269bdd8a/cryptography-44.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ed3534eb1090483c96178fcb0f8893719d96d5274dfde98aa6add34614e97c8e", size = 4016721 }, + { url = "https://files.pythonhosted.org/packages/bd/69/7ca326c55698d0688db867795134bdfac87136b80ef373aaa42b225d6dd5/cryptography-44.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:f3f6fdfa89ee2d9d496e2c087cebef9d4fcbb0ad63c40e821b39f74bf48d9c5e", size = 4240915 }, + { url = "https://files.pythonhosted.org/packages/ef/d4/cae11bf68c0f981e0413906c6dd03ae7fa864347ed5fac40021df1ef467c/cryptography-44.0.0-cp37-abi3-win32.whl", hash = "sha256:eb33480f1bad5b78233b0ad3e1b0be21e8ef1da745d8d2aecbb20671658b9053", size = 2757925 }, + { url = "https://files.pythonhosted.org/packages/64/b1/50d7739254d2002acae64eed4fc43b24ac0cc44bf0a0d388d1ca06ec5bb1/cryptography-44.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:abc998e0c0eee3c8a1904221d3f67dcfa76422b23620173e28c11d3e626c21bd", size = 3202055 }, + { url = "https://files.pythonhosted.org/packages/11/18/61e52a3d28fc1514a43b0ac291177acd1b4de00e9301aaf7ef867076ff8a/cryptography-44.0.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:660cb7312a08bc38be15b696462fa7cc7cd85c3ed9c576e81f4dc4d8b2b31591", size = 6542801 }, + { url = "https://files.pythonhosted.org/packages/1a/07/5f165b6c65696ef75601b781a280fc3b33f1e0cd6aa5a92d9fb96c410e97/cryptography-44.0.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1923cb251c04be85eec9fda837661c67c1049063305d6be5721643c22dd4e2b7", size = 3922613 }, + { url = "https://files.pythonhosted.org/packages/28/34/6b3ac1d80fc174812486561cf25194338151780f27e438526f9c64e16869/cryptography-44.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:404fdc66ee5f83a1388be54300ae978b2efd538018de18556dde92575e05defc", size = 4137925 }, + { url = "https://files.pythonhosted.org/packages/d0/c7/c656eb08fd22255d21bc3129625ed9cd5ee305f33752ef2278711b3fa98b/cryptography-44.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:c5eb858beed7835e5ad1faba59e865109f3e52b3783b9ac21e7e47dc5554e289", size = 3915417 }, + { url = "https://files.pythonhosted.org/packages/ef/82/72403624f197af0db6bac4e58153bc9ac0e6020e57234115db9596eee85d/cryptography-44.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f53c2c87e0fb4b0c00fa9571082a057e37690a8f12233306161c8f4b819960b7", size = 4155160 }, + { url = "https://files.pythonhosted.org/packages/a2/cd/2f3c440913d4329ade49b146d74f2e9766422e1732613f57097fea61f344/cryptography-44.0.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:9e6fc8a08e116fb7c7dd1f040074c9d7b51d74a8ea40d4df2fc7aa08b76b9e6c", size = 3932331 }, + { url = "https://files.pythonhosted.org/packages/7f/df/8be88797f0a1cca6e255189a57bb49237402b1880d6e8721690c5603ac23/cryptography-44.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:d2436114e46b36d00f8b72ff57e598978b37399d2786fd39793c36c6d5cb1c64", size = 4017372 }, + { url = "https://files.pythonhosted.org/packages/af/36/5ccc376f025a834e72b8e52e18746b927f34e4520487098e283a719c205e/cryptography-44.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a01956ddfa0a6790d594f5b34fc1bfa6098aca434696a03cfdbe469b8ed79285", size = 4239657 }, + { url = 
"https://files.pythonhosted.org/packages/46/b0/f4f7d0d0bcfbc8dd6296c1449be326d04217c57afb8b2594f017eed95533/cryptography-44.0.0-cp39-abi3-win32.whl", hash = "sha256:eca27345e1214d1b9f9490d200f9db5a874479be914199194e746c893788d417", size = 2758672 }, + { url = "https://files.pythonhosted.org/packages/97/9b/443270b9210f13f6ef240eff73fd32e02d381e7103969dc66ce8e89ee901/cryptography-44.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:708ee5f1bafe76d041b53a4f95eb28cdeb8d18da17e597d46d7833ee59b97ede", size = 3202071 }, + { url = "https://files.pythonhosted.org/packages/77/d4/fea74422326388bbac0c37b7489a0fcb1681a698c3b875959430ba550daa/cryptography-44.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:37d76e6863da3774cd9db5b409a9ecfd2c71c981c38788d3fcfaf177f447b731", size = 3338857 }, + { url = "https://files.pythonhosted.org/packages/1a/aa/ba8a7467c206cb7b62f09b4168da541b5109838627f582843bbbe0235e8e/cryptography-44.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:f677e1268c4e23420c3acade68fac427fffcb8d19d7df95ed7ad17cdef8404f4", size = 3850615 }, + { url = "https://files.pythonhosted.org/packages/89/fa/b160e10a64cc395d090105be14f399b94e617c879efd401188ce0fea39ee/cryptography-44.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f5e7cb1e5e56ca0933b4873c0220a78b773b24d40d186b6738080b73d3d0a756", size = 4081622 }, + { url = "https://files.pythonhosted.org/packages/47/8f/20ff0656bb0cf7af26ec1d01f780c5cfbaa7666736063378c5f48558b515/cryptography-44.0.0-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:8b3e6eae66cf54701ee7d9c83c30ac0a1e3fa17be486033000f2a73a12ab507c", size = 3867546 }, + { url = "https://files.pythonhosted.org/packages/38/d9/28edf32ee2fcdca587146bcde90102a7319b2f2c690edfa627e46d586050/cryptography-44.0.0-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:be4ce505894d15d5c5037167ffb7f0ae90b7be6f2a98f9a5c3442395501c32fa", size = 4090937 }, + { url = "https://files.pythonhosted.org/packages/cc/9d/37e5da7519de7b0b070a3fedd4230fe76d50d2a21403e0f2153d70ac4163/cryptography-44.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:62901fb618f74d7d81bf408c8719e9ec14d863086efe4185afd07c352aee1d2c", size = 3128774 }, +] + +[[package]] +name = "decorator" +version = "5.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/66/0c/8d907af351aa16b42caae42f9d6aa37b900c67308052d10fdce809f8d952/decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330", size = 35016 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d5/50/83c593b07763e1161326b3b8c6686f0f4b0f24d5526546bee538c89837d6/decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186", size = 9073 }, +] + +[[package]] +name = "distlib" +version = "0.3.9" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0d/dd/1bec4c5ddb504ca60fc29472f3d27e8d4da1257a854e1d96742f15c1d02d/distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403", size = 613923 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/91/a1/cf2472db20f7ce4a6be1253a81cfdf85ad9c7885ffbed7047fb72c24cf87/distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87", size = 468973 }, +] + +[[package]] +name = "dnspython" +version = "2.7.0" +source = { registry = "https://pypi.org/simple" } 
+sdist = { url = "https://files.pythonhosted.org/packages/b5/4a/263763cb2ba3816dd94b08ad3a33d5fdae34ecb856678773cc40a3605829/dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1", size = 345197 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/68/1b/e0a87d256e40e8c888847551b20a017a6b98139178505dc7ffb96f04e954/dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86", size = 313632 }, +] + +[[package]] +name = "docutils" +version = "0.21.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ae/ed/aefcc8cd0ba62a0560c3c18c33925362d46c6075480bfa4df87b28e169a9/docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f", size = 2204444 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8f/d7/9322c609343d929e75e7e5e6255e614fcc67572cfd083959cdef3b7aad79/docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2", size = 587408 }, +] + +[[package]] +name = "eventlet" +version = "0.38.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "dnspython" }, + { name = "greenlet" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a6/4e/f974cc85b8d19b31176e0cca90e1650156f385c9c294a96fc42846ca75e9/eventlet-0.38.2.tar.gz", hash = "sha256:6a46823af1dca7d29cf04c0d680365805435473c3acbffc176765c7f8787edac", size = 561526 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/aa/07/00feb2c708d71796e190a3051a0d530a4922bfb6b346aa8302725840698c/eventlet-0.38.2-py3-none-any.whl", hash = "sha256:4a2e3cbc53917c8f39074ccf689501168563d3a4df59e9cddd5e9d3b7f85c599", size = 363192 }, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/09/35/2495c4ac46b980e4ca1f6ad6db102322ef3ad2410b79fdde159a4b0f3b92/exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc", size = 28883 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/02/cc/b7e31358aac6ed1ef2bb790a9746ac2c69bcb3c8588b41616914eb106eaf/exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b", size = 16453 }, +] + +[[package]] +name = "filelock" +version = "3.16.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9d/db/3ef5bb276dae18d6ec2124224403d1d67bccdbefc17af4cc8f553e341ab1/filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435", size = 18037 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b9/f8/feced7779d755758a52d1f6635d990b8d98dc0a29fa568bbe0625f18fdf3/filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0", size = 16163 }, +] + +[[package]] +name = "furo" +version = "2024.8.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "beautifulsoup4" }, + { name = "pygments" }, + { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "sphinx-basic-ng" 
}, +] +sdist = { url = "https://files.pythonhosted.org/packages/a0/e2/d351d69a9a9e4badb4a5be062c2d0e87bd9e6c23b5e57337fef14bef34c8/furo-2024.8.6.tar.gz", hash = "sha256:b63e4cee8abfc3136d3bc03a3d45a76a850bada4d6374d24c1716b0e01394a01", size = 1661506 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/27/48/e791a7ed487dbb9729ef32bb5d1af16693d8925f4366befef54119b2e576/furo-2024.8.6-py3-none-any.whl", hash = "sha256:6cd97c58b47813d3619e63e9081169880fbe331f0ca883c871ff1f3f11814f5c", size = 341333 }, +] + +[[package]] +name = "gevent" +version = "24.11.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation == 'CPython' and sys_platform == 'win32'" }, + { name = "greenlet", marker = "platform_python_implementation == 'CPython'" }, + { name = "zope-event" }, + { name = "zope-interface" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ab/75/a53f1cb732420f5e5d79b2563fc3504d22115e7ecfe7966e5cf9b3582ae7/gevent-24.11.1.tar.gz", hash = "sha256:8bd1419114e9e4a3ed33a5bad766afff9a3cf765cb440a582a1b3a9bc80c1aca", size = 5976624 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/36/7d/27ed3603f4bf96b36fb2746e923e033bc600c6684de8fe164d64eb8c4dcc/gevent-24.11.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:92fe5dfee4e671c74ffaa431fd7ffd0ebb4b339363d24d0d944de532409b935e", size = 2998254 }, + { url = "https://files.pythonhosted.org/packages/a8/03/a8f6c70f50a644a79e75d9f15e6f1813115d34c3c55528e4669a9316534d/gevent-24.11.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7bfcfe08d038e1fa6de458891bca65c1ada6d145474274285822896a858c870", size = 4817711 }, + { url = "https://files.pythonhosted.org/packages/f0/05/4f9bc565520a18f107464d40ac15a91708431362c797e77fbb5e7ff26e64/gevent-24.11.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7398c629d43b1b6fd785db8ebd46c0a353880a6fab03d1cf9b6788e7240ee32e", size = 4934468 }, + { url = "https://files.pythonhosted.org/packages/4a/7d/f15561eeebecbebc0296dd7bebea10ac4af0065d98249e3d8c4998e68edd/gevent-24.11.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d7886b63ebfb865178ab28784accd32f287d5349b3ed71094c86e4d3ca738af5", size = 5014067 }, + { url = "https://files.pythonhosted.org/packages/67/c1/07eff117a600fc3c9bd4e3a1ff3b726f146ee23ce55981156547ccae0c85/gevent-24.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9ca80711e6553880974898d99357fb649e062f9058418a92120ca06c18c3c59", size = 6625531 }, + { url = "https://files.pythonhosted.org/packages/4b/72/43f76ab6b18e5e56b1003c844829971f3044af08b39b3c9040559be00a2b/gevent-24.11.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e24181d172f50097ac8fc272c8c5b030149b630df02d1c639ee9f878a470ba2b", size = 5249671 }, + { url = "https://files.pythonhosted.org/packages/6b/fc/1a847ada0757cc7690f83959227514b1a52ff6de504619501c81805fa1da/gevent-24.11.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1d4fadc319b13ef0a3c44d2792f7918cf1bca27cacd4d41431c22e6b46668026", size = 6773903 }, + { url = "https://files.pythonhosted.org/packages/3b/9d/254dcf455f6659ab7e36bec0bc11f51b18ea25eac2de69185e858ccf3c30/gevent-24.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:3d882faa24f347f761f934786dde6c73aa6c9187ee710189f12dcc3a63ed4a50", size = 1560443 }, + { url = 
"https://files.pythonhosted.org/packages/ea/fd/86a170f77ef51a15297573c50dbec4cc67ddc98b677cc2d03cc7f2927f4c/gevent-24.11.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:351d1c0e4ef2b618ace74c91b9b28b3eaa0dd45141878a964e03c7873af09f62", size = 2951424 }, + { url = "https://files.pythonhosted.org/packages/7f/0a/987268c9d446f61883bc627c77c5ed4a97869c0f541f76661a62b2c411f6/gevent-24.11.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5efe72e99b7243e222ba0c2c2ce9618d7d36644c166d63373af239da1036bab", size = 4878504 }, + { url = "https://files.pythonhosted.org/packages/dc/d4/2f77ddd837c0e21b4a4460bcb79318b6754d95ef138b7a29f3221c7e9993/gevent-24.11.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d3b249e4e1f40c598ab8393fc01ae6a3b4d51fc1adae56d9ba5b315f6b2d758", size = 5007668 }, + { url = "https://files.pythonhosted.org/packages/80/a0/829e0399a1f9b84c344b72d2be9aa60fe2a64e993cac221edcc14f069679/gevent-24.11.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81d918e952954675f93fb39001da02113ec4d5f4921bf5a0cc29719af6824e5d", size = 5067055 }, + { url = "https://files.pythonhosted.org/packages/1e/67/0e693f9ddb7909c2414f8fcfc2409aa4157884c147bc83dab979e9cf717c/gevent-24.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9c935b83d40c748b6421625465b7308d87c7b3717275acd587eef2bd1c39546", size = 6761883 }, + { url = "https://files.pythonhosted.org/packages/fa/b6/b69883fc069d7148dd23c5dda20826044e54e7197f3c8e72b8cc2cd4035a/gevent-24.11.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff96c5739834c9a594db0e12bf59cb3fa0e5102fc7b893972118a3166733d61c", size = 5440802 }, + { url = "https://files.pythonhosted.org/packages/32/4e/b00094d995ff01fd88b3cf6b9d1d794f935c31c645c431e65cd82d808c9c/gevent-24.11.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d6c0a065e31ef04658f799215dddae8752d636de2bed61365c358f9c91e7af61", size = 6866992 }, + { url = "https://files.pythonhosted.org/packages/37/ed/58dbe9fb09d36f6477ff8db0459ebd3be9a77dc05ae5d96dc91ad657610d/gevent-24.11.1-cp311-cp311-win_amd64.whl", hash = "sha256:97e2f3999a5c0656f42065d02939d64fffaf55861f7d62b0107a08f52c984897", size = 1543736 }, + { url = "https://files.pythonhosted.org/packages/dd/32/301676f67ffa996ff1c4175092fb0c48c83271cc95e5c67650b87156b6cf/gevent-24.11.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:a3d75fa387b69c751a3d7c5c3ce7092a171555126e136c1d21ecd8b50c7a6e46", size = 2956467 }, + { url = "https://files.pythonhosted.org/packages/6b/84/aef1a598123cef2375b6e2bf9d17606b961040f8a10e3dcc3c3dd2a99f05/gevent-24.11.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:beede1d1cff0c6fafae3ab58a0c470d7526196ef4cd6cc18e7769f207f2ea4eb", size = 5136486 }, + { url = "https://files.pythonhosted.org/packages/92/7b/04f61187ee1df7a913b3fca63b0a1206c29141ab4d2a57e7645237b6feb5/gevent-24.11.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:85329d556aaedced90a993226d7d1186a539c843100d393f2349b28c55131c85", size = 5299718 }, + { url = "https://files.pythonhosted.org/packages/36/2a/ebd12183ac25eece91d084be2111e582b061f4d15ead32239b43ed47e9ba/gevent-24.11.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:816b3883fa6842c1cf9d2786722014a0fd31b6312cca1f749890b9803000bad6", size = 5400118 }, + { url = 
"https://files.pythonhosted.org/packages/ec/c9/f006c0cd59f0720fbb62ee11da0ad4c4c0fd12799afd957dd491137e80d9/gevent-24.11.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b24d800328c39456534e3bc3e1684a28747729082684634789c2f5a8febe7671", size = 6775163 }, + { url = "https://files.pythonhosted.org/packages/49/f1/5edf00b674b10d67e3b967c2d46b8a124c2bc8cfd59d4722704392206444/gevent-24.11.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a5f1701ce0f7832f333dd2faf624484cbac99e60656bfbb72504decd42970f0f", size = 5479886 }, + { url = "https://files.pythonhosted.org/packages/22/11/c48e62744a32c0d48984268ae62b99edb81eaf0e03b42de52e2f09855509/gevent-24.11.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:d740206e69dfdfdcd34510c20adcb9777ce2cc18973b3441ab9767cd8948ca8a", size = 6891452 }, + { url = "https://files.pythonhosted.org/packages/11/b2/5d20664ef6a077bec9f27f7a7ee761edc64946d0b1e293726a3d074a9a18/gevent-24.11.1-cp312-cp312-win_amd64.whl", hash = "sha256:68bee86b6e1c041a187347ef84cf03a792f0b6c7238378bf6ba4118af11feaae", size = 1541631 }, + { url = "https://files.pythonhosted.org/packages/a4/8f/4958e70caeaf469c576ecc5b5f2cb49ddaad74336fa82363d89cddb3c284/gevent-24.11.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:d618e118fdb7af1d6c1a96597a5cd6ac84a9f3732b5be8515c6a66e098d498b6", size = 2949601 }, + { url = "https://files.pythonhosted.org/packages/3b/64/79892d250b7b2aa810688dfebe783aec02568e5cecacb1e100acbb9d95c6/gevent-24.11.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2142704c2adce9cd92f6600f371afb2860a446bfd0be5bd86cca5b3e12130766", size = 5107052 }, + { url = "https://files.pythonhosted.org/packages/66/44/9ee0ed1909b4f41375e32bf10036d5d8624962afcbd901573afdecd2e36a/gevent-24.11.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92e0d7759de2450a501effd99374256b26359e801b2d8bf3eedd3751973e87f5", size = 5271736 }, + { url = "https://files.pythonhosted.org/packages/e3/48/0184b2622a388a256199c5fadcad6b52b6455019c2a4b19edd6de58e30ba/gevent-24.11.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca845138965c8c56d1550499d6b923eb1a2331acfa9e13b817ad8305dde83d11", size = 5367782 }, + { url = "https://files.pythonhosted.org/packages/9a/b1/1a2704c346234d889d2e0042efb182534f7d294115f0e9f99d8079fa17eb/gevent-24.11.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:356b73d52a227d3313f8f828025b665deada57a43d02b1cf54e5d39028dbcf8d", size = 6757533 }, + { url = "https://files.pythonhosted.org/packages/ed/6e/b2eed8dec617264f0046d50a13a42d3f0a06c50071b9fc1eae00285a03f1/gevent-24.11.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:58851f23c4bdb70390f10fc020c973ffcf409eb1664086792c8b1e20f25eef43", size = 5449436 }, + { url = "https://files.pythonhosted.org/packages/63/c2/eca6b95fbf9af287fa91c327494e4b74a8d5bfa0156cd87b233f63f118dc/gevent-24.11.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:1ea50009ecb7f1327347c37e9eb6561bdbc7de290769ee1404107b9a9cba7cf1", size = 6866470 }, + { url = "https://files.pythonhosted.org/packages/b7/e6/51824bd1f2c1ce70aa01495aa6ffe04ab789fa819fa7e6f0ad2388fb03c6/gevent-24.11.1-cp313-cp313-win_amd64.whl", hash = "sha256:ec68e270543ecd532c4c1d70fca020f90aa5486ad49c4f3b8b2e64a66f5c9274", size = 1540088 }, + { url = "https://files.pythonhosted.org/packages/a0/73/263d0f63186d27d205b3dc157efe838afe3aba10a3baca15d85e97b90eae/gevent-24.11.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d9347690f4e53de2c4af74e62d6fabc940b6d4a6cad555b5a379f61e7d3f2a8e", size = 6658480 }, + { url = "https://files.pythonhosted.org/packages/8a/fd/ec7b5c764a3d1340160b82f7394fdc1220d18e11ae089c472cf7bcc2fe6a/gevent-24.11.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8619d5c888cb7aebf9aec6703e410620ef5ad48cdc2d813dd606f8aa7ace675f", size = 6808247 }, + { url = "https://files.pythonhosted.org/packages/95/82/2ce68dc8dbc2c3ed3f4e73f21e1b7a45d80b5225670225a48e695f248850/gevent-24.11.1-cp39-cp39-win32.whl", hash = "sha256:c6b775381f805ff5faf250e3a07c0819529571d19bb2a9d474bee8c3f90d66af", size = 1483133 }, + { url = "https://files.pythonhosted.org/packages/76/96/aa4cbcf1807187b65a9c9ff15b32b08c2014968be852dda34d212cf8cc58/gevent-24.11.1-cp39-cp39-win_amd64.whl", hash = "sha256:1c3443b0ed23dcb7c36a748d42587168672953d368f2956b17fad36d43b58836", size = 1566354 }, + { url = "https://files.pythonhosted.org/packages/86/63/197aa67250943b508b34995c2aa6b46402e7e6f11785487740c2057bfb20/gevent-24.11.1-pp310-pypy310_pp73-macosx_11_0_universal2.whl", hash = "sha256:f43f47e702d0c8e1b8b997c00f1601486f9f976f84ab704f8f11536e3fa144c9", size = 1271676 }, +] + +[[package]] +name = "greenlet" +version = "3.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2f/ff/df5fede753cc10f6a5be0931204ea30c35fa2f2ea7a35b25bdaf4fe40e46/greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467", size = 186022 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/25/90/5234a78dc0ef6496a6eb97b67a42a8e96742a56f7dc808cb954a85390448/greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563", size = 271235 }, + { url = "https://files.pythonhosted.org/packages/7c/16/cd631fa0ab7d06ef06387135b7549fdcc77d8d859ed770a0d28e47b20972/greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83", size = 637168 }, + { url = "https://files.pythonhosted.org/packages/2f/b1/aed39043a6fec33c284a2c9abd63ce191f4f1a07319340ffc04d2ed3256f/greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0", size = 648826 }, + { url = "https://files.pythonhosted.org/packages/76/25/40e0112f7f3ebe54e8e8ed91b2b9f970805143efef16d043dfc15e70f44b/greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120", size = 644443 }, + { url = "https://files.pythonhosted.org/packages/fb/2f/3850b867a9af519794784a7eeed1dd5bc68ffbcc5b28cef703711025fd0a/greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc", size = 643295 }, + { url = "https://files.pythonhosted.org/packages/cf/69/79e4d63b9387b48939096e25115b8af7cd8a90397a304f92436bcb21f5b2/greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617", size = 599544 }, + { url = "https://files.pythonhosted.org/packages/46/1d/44dbcb0e6c323bd6f71b8c2f4233766a5faf4b8948873225d34a0b7efa71/greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7", size = 1125456 }, + { url = 
"https://files.pythonhosted.org/packages/e0/1d/a305dce121838d0278cee39d5bb268c657f10a5363ae4b726848f833f1bb/greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6", size = 1149111 }, + { url = "https://files.pythonhosted.org/packages/96/28/d62835fb33fb5652f2e98d34c44ad1a0feacc8b1d3f1aecab035f51f267d/greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80", size = 298392 }, + { url = "https://files.pythonhosted.org/packages/28/62/1c2665558618553c42922ed47a4e6d6527e2fa3516a8256c2f431c5d0441/greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70", size = 272479 }, + { url = "https://files.pythonhosted.org/packages/76/9d/421e2d5f07285b6e4e3a676b016ca781f63cfe4a0cd8eaecf3fd6f7a71ae/greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159", size = 640404 }, + { url = "https://files.pythonhosted.org/packages/e5/de/6e05f5c59262a584e502dd3d261bbdd2c97ab5416cc9c0b91ea38932a901/greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e", size = 652813 }, + { url = "https://files.pythonhosted.org/packages/49/93/d5f93c84241acdea15a8fd329362c2c71c79e1a507c3f142a5d67ea435ae/greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1", size = 648517 }, + { url = "https://files.pythonhosted.org/packages/15/85/72f77fc02d00470c86a5c982b8daafdf65d38aefbbe441cebff3bf7037fc/greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383", size = 647831 }, + { url = "https://files.pythonhosted.org/packages/f7/4b/1c9695aa24f808e156c8f4813f685d975ca73c000c2a5056c514c64980f6/greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a", size = 602413 }, + { url = "https://files.pythonhosted.org/packages/76/70/ad6e5b31ef330f03b12559d19fda2606a522d3849cde46b24f223d6d1619/greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511", size = 1129619 }, + { url = "https://files.pythonhosted.org/packages/f4/fb/201e1b932e584066e0f0658b538e73c459b34d44b4bd4034f682423bc801/greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395", size = 1155198 }, + { url = "https://files.pythonhosted.org/packages/12/da/b9ed5e310bb8b89661b80cbcd4db5a067903bbcd7fc854923f5ebb4144f0/greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39", size = 298930 }, + { url = "https://files.pythonhosted.org/packages/7d/ec/bad1ac26764d26aa1353216fcbfa4670050f66d445448aafa227f8b16e80/greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d", size = 274260 }, + { url = "https://files.pythonhosted.org/packages/66/d4/c8c04958870f482459ab5956c2942c4ec35cac7fe245527f1039837c17a9/greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79", size = 649064 }, + { url = "https://files.pythonhosted.org/packages/51/41/467b12a8c7c1303d20abcca145db2be4e6cd50a951fa30af48b6ec607581/greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa", size = 663420 }, + { url = "https://files.pythonhosted.org/packages/27/8f/2a93cd9b1e7107d5c7b3b7816eeadcac2ebcaf6d6513df9abaf0334777f6/greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441", size = 658035 }, + { url = "https://files.pythonhosted.org/packages/57/5c/7c6f50cb12be092e1dccb2599be5a942c3416dbcfb76efcf54b3f8be4d8d/greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36", size = 660105 }, + { url = "https://files.pythonhosted.org/packages/f1/66/033e58a50fd9ec9df00a8671c74f1f3a320564c6415a4ed82a1c651654ba/greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9", size = 613077 }, + { url = "https://files.pythonhosted.org/packages/19/c5/36384a06f748044d06bdd8776e231fadf92fc896bd12cb1c9f5a1bda9578/greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0", size = 1135975 }, + { url = "https://files.pythonhosted.org/packages/38/f9/c0a0eb61bdf808d23266ecf1d63309f0e1471f284300ce6dac0ae1231881/greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942", size = 1163955 }, + { url = "https://files.pythonhosted.org/packages/43/21/a5d9df1d21514883333fc86584c07c2b49ba7c602e670b174bd73cfc9c7f/greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01", size = 299655 }, + { url = "https://files.pythonhosted.org/packages/f3/57/0db4940cd7bb461365ca8d6fd53e68254c9dbbcc2b452e69d0d41f10a85e/greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1", size = 272990 }, + { url = "https://files.pythonhosted.org/packages/1c/ec/423d113c9f74e5e402e175b157203e9102feeb7088cee844d735b28ef963/greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff", size = 649175 }, + { url = "https://files.pythonhosted.org/packages/a9/46/ddbd2db9ff209186b7b7c621d1432e2f21714adc988703dbdd0e65155c77/greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a", size = 663425 }, + { url = "https://files.pythonhosted.org/packages/bc/f9/9c82d6b2b04aa37e38e74f0c429aece5eeb02bab6e3b98e7db89b23d94c6/greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e", size = 657736 }, + { url = "https://files.pythonhosted.org/packages/d9/42/b87bc2a81e3a62c3de2b0d550bf91a86939442b7ff85abb94eec3fc0e6aa/greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4", size = 660347 }, + { url = 
"https://files.pythonhosted.org/packages/37/fa/71599c3fd06336cdc3eac52e6871cfebab4d9d70674a9a9e7a482c318e99/greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e", size = 615583 }, + { url = "https://files.pythonhosted.org/packages/4e/96/e9ef85de031703ee7a4483489b40cf307f93c1824a02e903106f2ea315fe/greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1", size = 1133039 }, + { url = "https://files.pythonhosted.org/packages/87/76/b2b6362accd69f2d1889db61a18c94bc743e961e3cab344c2effaa4b4a25/greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c", size = 1160716 }, + { url = "https://files.pythonhosted.org/packages/1f/1b/54336d876186920e185066d8c3024ad55f21d7cc3683c856127ddb7b13ce/greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761", size = 299490 }, + { url = "https://files.pythonhosted.org/packages/5f/17/bea55bf36990e1638a2af5ba10c1640273ef20f627962cf97107f1e5d637/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011", size = 643731 }, + { url = "https://files.pythonhosted.org/packages/78/d2/aa3d2157f9ab742a08e0fd8f77d4699f37c22adfbfeb0c610a186b5f75e0/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13", size = 649304 }, + { url = "https://files.pythonhosted.org/packages/f1/8e/d0aeffe69e53ccff5a28fa86f07ad1d2d2d6537a9506229431a2a02e2f15/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475", size = 646537 }, + { url = "https://files.pythonhosted.org/packages/05/79/e15408220bbb989469c8871062c97c6c9136770657ba779711b90870d867/greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b", size = 642506 }, + { url = "https://files.pythonhosted.org/packages/18/87/470e01a940307796f1d25f8167b551a968540fbe0551c0ebb853cb527dd6/greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822", size = 602753 }, + { url = "https://files.pythonhosted.org/packages/e2/72/576815ba674eddc3c25028238f74d7b8068902b3968cbe456771b166455e/greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01", size = 1122731 }, + { url = "https://files.pythonhosted.org/packages/ac/38/08cc303ddddc4b3d7c628c3039a61a3aae36c241ed01393d00c2fd663473/greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6", size = 1142112 }, + { url = "https://files.pythonhosted.org/packages/8c/82/8051e82af6d6b5150aacb6789a657a8afd48f0a44d8e91cb72aaaf28553a/greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3", size = 270027 }, + { url = 
"https://files.pythonhosted.org/packages/f9/74/f66de2785880293780eebd18a2958aeea7cbe7814af1ccef634f4701f846/greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42", size = 634822 }, + { url = "https://files.pythonhosted.org/packages/68/23/acd9ca6bc412b02b8aa755e47b16aafbe642dde0ad2f929f836e57a7949c/greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f", size = 646866 }, + { url = "https://files.pythonhosted.org/packages/a9/ab/562beaf8a53dc9f6b2459f200e7bc226bb07e51862a66351d8b7817e3efd/greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437", size = 641985 }, + { url = "https://files.pythonhosted.org/packages/03/d3/1006543621f16689f6dc75f6bcf06e3c23e044c26fe391c16c253623313e/greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145", size = 641268 }, + { url = "https://files.pythonhosted.org/packages/2f/c1/ad71ce1b5f61f900593377b3f77b39408bce5dc96754790311b49869e146/greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c", size = 597376 }, + { url = "https://files.pythonhosted.org/packages/f7/ff/183226685b478544d61d74804445589e069d00deb8ddef042699733950c7/greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e", size = 1123359 }, + { url = "https://files.pythonhosted.org/packages/c0/8b/9b3b85a89c22f55f315908b94cd75ab5fed5973f7393bbef000ca8b2c5c1/greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e", size = 1147458 }, + { url = "https://files.pythonhosted.org/packages/b8/1c/248fadcecd1790b0ba793ff81fa2375c9ad6442f4c748bf2cc2e6563346a/greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c", size = 281131 }, + { url = "https://files.pythonhosted.org/packages/ae/02/e7d0aef2354a38709b764df50b2b83608f0621493e47f47694eb80922822/greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22", size = 298306 }, +] + +[[package]] +name = "h11" +version = "0.14.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f5/38/3af3d3633a34a3316095b39c8e8fb4853a28a536e55d347bd8d8e9a14b03/h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d", size = 100418 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/95/04/ff642e65ad6b90db43e668d70ffb6736436c7ce41fcc549f4e9472234127/h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761", size = 58259 }, +] + +[[package]] +name = "httpcore" +version = "1.0.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6a/41/d7d0a89eb493922c37d343b607bc1b5da7f5be7e383740b4753ad8943e90/httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c", size = 85196 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/87/f5/72347bc88306acb359581ac4d52f23c0ef445b57157adedb9aee0cd689d2/httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd", size = 78551 }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517 }, +] + +[[package]] +name = "identify" +version = "2.6.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cf/92/69934b9ef3c31ca2470980423fda3d00f0460ddefdf30a67adf7f17e2e00/identify-2.6.5.tar.gz", hash = "sha256:c10b33f250e5bba374fae86fb57f3adcebf1161bce7cdf92031915fd480c13bc", size = 99213 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/fa/dce098f4cdf7621aa8f7b4f919ce545891f489482f0bfa5102f3eca8608b/identify-2.6.5-py2.py3-none-any.whl", hash = "sha256:14181a47091eb75b337af4c23078c9d09225cd4c48929f521f3bf16b09d02566", size = 99078 }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, +] + +[[package]] +name = "imagesize" +version = "1.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a7/84/62473fb57d61e31fef6e36d64a179c8781605429fd927b5dd608c997be31/imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a", size = 1280026 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ff/62/85c4c919272577931d407be5ba5d71c20f0b616d31a0befe0ae45bb79abd/imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b", size = 8769 }, +] + +[[package]] +name = "importlib-metadata" +version = "8.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "zipp", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cd/12/33e59336dca5be0c398a7482335911a33aa0e20776128f038019f1a95f1b/importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7", size = 55304 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/d9/a1e041c5e7caa9a05c925f4bdbdfb7f006d1f74996af53467bc394c97be7/importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b", size = 26514 }, +] + +[[package]] 
+name = "iniconfig" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892 }, +] + +[[package]] +name = "jinja2" +version = "3.1.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/af/92/b3130cbbf5591acf9ade8708c365f3238046ac7cb8ccba6e81abccb0ccff/jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb", size = 244674 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bd/0f/2ba5fbcd631e3e88689309dbe978c5769e883e4b84ebfe7da30b43275c5a/jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb", size = 134596 }, +] + +[[package]] +name = "jmespath" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/00/2a/e867e8531cf3e36b41201936b7fa7ba7b5702dbef42922193f05c8976cd6/jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe", size = 25843 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/b4/b9b800c45527aadd64d5b442f9b932b00648617eb5d63d2c7a6587b7cafc/jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", size = 20256 }, +] + +[[package]] +name = "markupsafe" +version = "3.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/90/d08277ce111dd22f77149fd1a5d4653eeb3b3eaacbdfcbae5afb2600eebd/MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8", size = 14357 }, + { url = "https://files.pythonhosted.org/packages/04/e1/6e2194baeae0bca1fae6629dc0cbbb968d4d941469cbab11a3872edff374/MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158", size = 12393 }, + { url = "https://files.pythonhosted.org/packages/1d/69/35fa85a8ece0a437493dc61ce0bb6d459dcba482c34197e3efc829aa357f/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579", size = 21732 }, + { url = "https://files.pythonhosted.org/packages/22/35/137da042dfb4720b638d2937c38a9c2df83fe32d20e8c8f3185dbfef05f7/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d", size = 20866 }, + { url = 
"https://files.pythonhosted.org/packages/29/28/6d029a903727a1b62edb51863232152fd335d602def598dade38996887f0/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb", size = 20964 }, + { url = "https://files.pythonhosted.org/packages/cc/cd/07438f95f83e8bc028279909d9c9bd39e24149b0d60053a97b2bc4f8aa51/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b", size = 21977 }, + { url = "https://files.pythonhosted.org/packages/29/01/84b57395b4cc062f9c4c55ce0df7d3108ca32397299d9df00fedd9117d3d/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c", size = 21366 }, + { url = "https://files.pythonhosted.org/packages/bd/6e/61ebf08d8940553afff20d1fb1ba7294b6f8d279df9fd0c0db911b4bbcfd/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171", size = 21091 }, + { url = "https://files.pythonhosted.org/packages/11/23/ffbf53694e8c94ebd1e7e491de185124277964344733c45481f32ede2499/MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50", size = 15065 }, + { url = "https://files.pythonhosted.org/packages/44/06/e7175d06dd6e9172d4a69a72592cb3f7a996a9c396eee29082826449bbc3/MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a", size = 15514 }, + { url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", size = 14353 }, + { url = "https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", size = 12392 }, + { url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", size = 23984 }, + { url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", size = 23120 }, + { url = "https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", size = 23032 }, + { url = "https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", size = 24057 }, + { url = "https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", 
hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", size = 23359 }, + { url = "https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", size = 23306 }, + { url = "https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", size = 15094 }, + { url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", size = 15521 }, + { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274 }, + { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348 }, + { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149 }, + { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118 }, + { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993 }, + { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178 }, + { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319 }, + { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352 }, + { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097 }, + { url = 
"https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601 }, + { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274 }, + { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352 }, + { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122 }, + { url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085 }, + { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978 }, + { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208 }, + { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357 }, + { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344 }, + { url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101 }, + { url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603 }, + { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510 }, + { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = 
"sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486 }, + { url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480 }, + { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914 }, + { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796 }, + { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473 }, + { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114 }, + { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098 }, + { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208 }, + { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739 }, + { url = "https://files.pythonhosted.org/packages/a7/ea/9b1530c3fdeeca613faeb0fb5cbcf2389d816072fab72a71b45749ef6062/MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a", size = 14344 }, + { url = "https://files.pythonhosted.org/packages/4b/c2/fbdbfe48848e7112ab05e627e718e854d20192b674952d9042ebd8c9e5de/MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff", size = 12389 }, + { url = "https://files.pythonhosted.org/packages/f0/25/7a7c6e4dbd4f867d95d94ca15449e91e52856f6ed1905d58ef1de5e211d0/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13", size = 21607 }, + { url = "https://files.pythonhosted.org/packages/53/8f/f339c98a178f3c1e545622206b40986a4c3307fe39f70ccd3d9df9a9e425/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144", size = 20728 }, + { url = 
"https://files.pythonhosted.org/packages/1a/03/8496a1a78308456dbd50b23a385c69b41f2e9661c67ea1329849a598a8f9/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29", size = 20826 }, + { url = "https://files.pythonhosted.org/packages/e6/cf/0a490a4bd363048c3022f2f475c8c05582179bb179defcee4766fb3dcc18/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0", size = 21843 }, + { url = "https://files.pythonhosted.org/packages/19/a3/34187a78613920dfd3cdf68ef6ce5e99c4f3417f035694074beb8848cd77/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0", size = 21219 }, + { url = "https://files.pythonhosted.org/packages/17/d8/5811082f85bb88410ad7e452263af048d685669bbbfb7b595e8689152498/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178", size = 20946 }, + { url = "https://files.pythonhosted.org/packages/7c/31/bd635fb5989440d9365c5e3c47556cfea121c7803f5034ac843e8f37c2f2/MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f", size = 15063 }, + { url = "https://files.pythonhosted.org/packages/b3/73/085399401383ce949f727afec55ec3abd76648d04b9f22e1c0e99cb4bec3/MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a", size = 15506 }, +] + +[[package]] +name = "mockupdb" +version = "1.9.0.dev1" +source = { git = "https://github.com/mongodb-labs/mongo-mockup-db?rev=master#317c4e049965f9d99423698a81e52d0ab37b7599" } +dependencies = [ + { name = "pymongo" }, +] + +[[package]] +name = "mypy" +version = "1.14.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mypy-extensions" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b9/eb/2c92d8ea1e684440f54fa49ac5d9a5f19967b7b472a281f419e69a8d228e/mypy-1.14.1.tar.gz", hash = "sha256:7ec88144fe9b510e8475ec2f5f251992690fcf89ccb4500b214b4226abcd32d6", size = 3216051 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9b/7a/87ae2adb31d68402da6da1e5f30c07ea6063e9f09b5e7cfc9dfa44075e74/mypy-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:52686e37cf13d559f668aa398dd7ddf1f92c5d613e4f8cb262be2fb4fedb0fcb", size = 11211002 }, + { url = "https://files.pythonhosted.org/packages/e1/23/eada4c38608b444618a132be0d199b280049ded278b24cbb9d3fc59658e4/mypy-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1fb545ca340537d4b45d3eecdb3def05e913299ca72c290326be19b3804b39c0", size = 10358400 }, + { url = "https://files.pythonhosted.org/packages/43/c9/d6785c6f66241c62fd2992b05057f404237deaad1566545e9f144ced07f5/mypy-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:90716d8b2d1f4cd503309788e51366f07c56635a3309b0f6a32547eaaa36a64d", size = 12095172 }, + { url = "https://files.pythonhosted.org/packages/c3/62/daa7e787770c83c52ce2aaf1a111eae5893de9e004743f51bfcad9e487ec/mypy-1.14.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ae753f5c9fef278bcf12e1a564351764f2a6da579d4a81347e1d5a15819997b", size = 12828732 }, + { url = 
"https://files.pythonhosted.org/packages/1b/a2/5fb18318a3637f29f16f4e41340b795da14f4751ef4f51c99ff39ab62e52/mypy-1.14.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e0fe0f5feaafcb04505bcf439e991c6d8f1bf8b15f12b05feeed96e9e7bf1427", size = 13012197 }, + { url = "https://files.pythonhosted.org/packages/28/99/e153ce39105d164b5f02c06c35c7ba958aaff50a2babba7d080988b03fe7/mypy-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:7d54bd85b925e501c555a3227f3ec0cfc54ee8b6930bd6141ec872d1c572f81f", size = 9780836 }, + { url = "https://files.pythonhosted.org/packages/da/11/a9422850fd506edbcdc7f6090682ecceaf1f87b9dd847f9df79942da8506/mypy-1.14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f995e511de847791c3b11ed90084a7a0aafdc074ab88c5a9711622fe4751138c", size = 11120432 }, + { url = "https://files.pythonhosted.org/packages/b6/9e/47e450fd39078d9c02d620545b2cb37993a8a8bdf7db3652ace2f80521ca/mypy-1.14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d64169ec3b8461311f8ce2fd2eb5d33e2d0f2c7b49116259c51d0d96edee48d1", size = 10279515 }, + { url = "https://files.pythonhosted.org/packages/01/b5/6c8d33bd0f851a7692a8bfe4ee75eb82b6983a3cf39e5e32a5d2a723f0c1/mypy-1.14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ba24549de7b89b6381b91fbc068d798192b1b5201987070319889e93038967a8", size = 12025791 }, + { url = "https://files.pythonhosted.org/packages/f0/4c/e10e2c46ea37cab5c471d0ddaaa9a434dc1d28650078ac1b56c2d7b9b2e4/mypy-1.14.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:183cf0a45457d28ff9d758730cd0210419ac27d4d3f285beda038c9083363b1f", size = 12749203 }, + { url = "https://files.pythonhosted.org/packages/88/55/beacb0c69beab2153a0f57671ec07861d27d735a0faff135a494cd4f5020/mypy-1.14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f2a0ecc86378f45347f586e4163d1769dd81c5a223d577fe351f26b179e148b1", size = 12885900 }, + { url = "https://files.pythonhosted.org/packages/a2/75/8c93ff7f315c4d086a2dfcde02f713004357d70a163eddb6c56a6a5eff40/mypy-1.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:ad3301ebebec9e8ee7135d8e3109ca76c23752bac1e717bc84cd3836b4bf3eae", size = 9777869 }, + { url = "https://files.pythonhosted.org/packages/43/1b/b38c079609bb4627905b74fc6a49849835acf68547ac33d8ceb707de5f52/mypy-1.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:30ff5ef8519bbc2e18b3b54521ec319513a26f1bba19a7582e7b1f58a6e69f14", size = 11266668 }, + { url = "https://files.pythonhosted.org/packages/6b/75/2ed0d2964c1ffc9971c729f7a544e9cd34b2cdabbe2d11afd148d7838aa2/mypy-1.14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cb9f255c18052343c70234907e2e532bc7e55a62565d64536dbc7706a20b78b9", size = 10254060 }, + { url = "https://files.pythonhosted.org/packages/a1/5f/7b8051552d4da3c51bbe8fcafffd76a6823779101a2b198d80886cd8f08e/mypy-1.14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b4e3413e0bddea671012b063e27591b953d653209e7a4fa5e48759cda77ca11", size = 11933167 }, + { url = "https://files.pythonhosted.org/packages/04/90/f53971d3ac39d8b68bbaab9a4c6c58c8caa4d5fd3d587d16f5927eeeabe1/mypy-1.14.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:553c293b1fbdebb6c3c4030589dab9fafb6dfa768995a453d8a5d3b23784af2e", size = 12864341 }, + { url = "https://files.pythonhosted.org/packages/03/d2/8bc0aeaaf2e88c977db41583559319f1821c069e943ada2701e86d0430b7/mypy-1.14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:fad79bfe3b65fe6a1efaed97b445c3d37f7be9fdc348bdb2d7cac75579607c89", size = 12972991 }, + { url = "https://files.pythonhosted.org/packages/6f/17/07815114b903b49b0f2cf7499f1c130e5aa459411596668267535fe9243c/mypy-1.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:8fa2220e54d2946e94ab6dbb3ba0a992795bd68b16dc852db33028df2b00191b", size = 9879016 }, + { url = "https://files.pythonhosted.org/packages/9e/15/bb6a686901f59222275ab228453de741185f9d54fecbaacec041679496c6/mypy-1.14.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:92c3ed5afb06c3a8e188cb5da4984cab9ec9a77ba956ee419c68a388b4595255", size = 11252097 }, + { url = "https://files.pythonhosted.org/packages/f8/b3/8b0f74dfd072c802b7fa368829defdf3ee1566ba74c32a2cb2403f68024c/mypy-1.14.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:dbec574648b3e25f43d23577309b16534431db4ddc09fda50841f1e34e64ed34", size = 10239728 }, + { url = "https://files.pythonhosted.org/packages/c5/9b/4fd95ab20c52bb5b8c03cc49169be5905d931de17edfe4d9d2986800b52e/mypy-1.14.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8c6d94b16d62eb3e947281aa7347d78236688e21081f11de976376cf010eb31a", size = 11924965 }, + { url = "https://files.pythonhosted.org/packages/56/9d/4a236b9c57f5d8f08ed346914b3f091a62dd7e19336b2b2a0d85485f82ff/mypy-1.14.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d4b19b03fdf54f3c5b2fa474c56b4c13c9dbfb9a2db4370ede7ec11a2c5927d9", size = 12867660 }, + { url = "https://files.pythonhosted.org/packages/40/88/a61a5497e2f68d9027de2bb139c7bb9abaeb1be1584649fa9d807f80a338/mypy-1.14.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0c911fde686394753fff899c409fd4e16e9b294c24bfd5e1ea4675deae1ac6fd", size = 12969198 }, + { url = "https://files.pythonhosted.org/packages/54/da/3d6fc5d92d324701b0c23fb413c853892bfe0e1dbe06c9138037d459756b/mypy-1.14.1-cp313-cp313-win_amd64.whl", hash = "sha256:8b21525cb51671219f5307be85f7e646a153e5acc656e5cebf64bfa076c50107", size = 9885276 }, + { url = "https://files.pythonhosted.org/packages/ca/1f/186d133ae2514633f8558e78cd658070ba686c0e9275c5a5c24a1e1f0d67/mypy-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3888a1816d69f7ab92092f785a462944b3ca16d7c470d564165fe703b0970c35", size = 11200493 }, + { url = "https://files.pythonhosted.org/packages/af/fc/4842485d034e38a4646cccd1369f6b1ccd7bc86989c52770d75d719a9941/mypy-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46c756a444117c43ee984bd055db99e498bc613a70bbbc120272bd13ca579fbc", size = 10357702 }, + { url = "https://files.pythonhosted.org/packages/b4/e6/457b83f2d701e23869cfec013a48a12638f75b9d37612a9ddf99072c1051/mypy-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:27fc248022907e72abfd8e22ab1f10e903915ff69961174784a3900a8cba9ad9", size = 12091104 }, + { url = "https://files.pythonhosted.org/packages/f1/bf/76a569158db678fee59f4fd30b8e7a0d75bcbaeef49edd882a0d63af6d66/mypy-1.14.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:499d6a72fb7e5de92218db961f1a66d5f11783f9ae549d214617edab5d4dbdbb", size = 12830167 }, + { url = "https://files.pythonhosted.org/packages/43/bc/0bc6b694b3103de9fed61867f1c8bd33336b913d16831431e7cb48ef1c92/mypy-1.14.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:57961db9795eb566dc1d1b4e9139ebc4c6b0cb6e7254ecde69d1552bf7613f60", size = 13013834 }, + { url = 
"https://files.pythonhosted.org/packages/b0/79/5f5ec47849b6df1e6943d5fd8e6632fbfc04b4fd4acfa5a5a9535d11b4e2/mypy-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:07ba89fdcc9451f2ebb02853deb6aaaa3d2239a236669a63ab3801bbf923ef5c", size = 9781231 }, + { url = "https://files.pythonhosted.org/packages/a0/b5/32dd67b69a16d088e533962e5044e51004176a9952419de0370cdaead0f8/mypy-1.14.1-py3-none-any.whl", hash = "sha256:b66a60cc4073aeb8ae00057f9c1f64d49e90f918fbcef9a977eb121da8b8f1d1", size = 2752905 }, +] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/98/a4/1ab47638b92648243faf97a5aeb6ea83059cc3624972ab6b8d2316078d3f/mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782", size = 4433 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/e2/5d3f6ada4297caebe1a2add3b126fe800c96f56dbe5d1988a2cbe0b267aa/mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", size = 4695 }, +] + +[[package]] +name = "nodeenv" +version = "1.9.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314 }, +] + +[[package]] +name = "packaging" +version = "24.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d0/63/68dbb6eb2de9cb10ee4c9c14a0148804425e13c4fb20d61cce69f53106da/packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f", size = 163950 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759", size = 65451 }, +] + +[[package]] +name = "pip" +version = "24.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f4/b1/b422acd212ad7eedddaf7981eee6e5de085154ff726459cf2da7c5a184c1/pip-24.3.1.tar.gz", hash = "sha256:ebcb60557f2aefabc2e0f918751cd24ea0d56d8ec5445fe1807f1d2109660b99", size = 1931073 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/7d/500c9ad20238fcfcb4cb9243eede163594d7020ce87bd9610c9e02771876/pip-24.3.1-py3-none-any.whl", hash = "sha256:3790624780082365f47549d032f3770eeb2b1e8bd1f7b2e02dace1afa361b4ed", size = 1822182 }, +] + +[[package]] +name = "platformdirs" +version = "4.3.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/13/fc/128cc9cb8f03208bdbf93d3aa862e16d376844a14f9a0ce5cf4507372de4/platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907", size = 21302 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/a6/bc1012356d8ece4d66dd75c4b9fc6c1f6650ddd5991e421177d9f8f671be/platformdirs-4.3.6-py3-none-any.whl", hash = 
"sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb", size = 18439 }, +] + +[[package]] +name = "pluggy" +version = "1.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/96/2d/02d4312c973c6050a18b314a5ad0b3210edb65a906f868e31c111dede4a6/pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", size = 67955 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556 }, +] + +[[package]] +name = "pre-commit" +version = "4.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cfgv" }, + { name = "identify" }, + { name = "nodeenv" }, + { name = "pyyaml" }, + { name = "virtualenv" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2e/c8/e22c292035f1bac8b9f5237a2622305bc0304e776080b246f3df57c4ff9f/pre_commit-4.0.1.tar.gz", hash = "sha256:80905ac375958c0444c65e9cebebd948b3cdb518f335a091a670a89d652139d2", size = 191678 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/16/8f/496e10d51edd6671ebe0432e33ff800aa86775d2d147ce7d43389324a525/pre_commit-4.0.1-py2.py3-none-any.whl", hash = "sha256:efde913840816312445dc98787724647c65473daefe420785f885e8ed9a06878", size = 218713 }, +] + +[[package]] +name = "pyasn1" +version = "0.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/e9/01f1a64245b89f039897cb0130016d79f77d52669aae6ee7b159a6c4c018/pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034", size = 145322 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/f1/d6a797abb14f6283c0ddff96bbdd46937f64122b8c925cab503dd37f8214/pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629", size = 83135 }, +] + +[[package]] +name = "pyasn1-modules" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyasn1" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1d/67/6afbf0d507f73c32d21084a79946bfcfca5fbc62a72057e9c23797a737c9/pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c", size = 310028 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/89/bc88a6711935ba795a679ea6ebee07e128050d6382eaa35a0a47c8032bdc/pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd", size = 181537 }, +] + +[[package]] +name = "pycparser" +version = "2.22" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552 }, +] + +[[package]] +name = "pygments" +version = "2.19.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/7c/2d/c3338d48ea6cc0feb8446d8e6937e1408088a72a39937982cc6111d17f84/pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f", size = 4968581 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293 }, +] + +[[package]] +name = "pykerberos" +version = "1.2.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/07/e9/ae44ea7d7605df9e5ca1ed745a2c5672dc838a8398101051dd5f255b130d/pykerberos-1.2.4.tar.gz", hash = "sha256:9d701ebd8fc596c99d3155d5ba45813bd5908d26ef83ba0add250edb622abed4", size = 25046 } + +[[package]] +name = "pymongo" +version = "4.11.0.dev0" +source = { editable = "." } +dependencies = [ + { name = "dnspython" }, +] + +[package.optional-dependencies] +aws = [ + { name = "pymongo-auth-aws" }, +] +docs = [ + { name = "furo" }, + { name = "readthedocs-sphinx-search" }, + { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "sphinx-autobuild" }, + { name = "sphinx-rtd-theme" }, + { name = "sphinxcontrib-shellcheck" }, +] +encryption = [ + { name = "certifi", marker = "os_name == 'nt' or sys_platform == 'darwin'" }, + { name = "pymongo-auth-aws" }, + { name = "pymongocrypt" }, +] +gssapi = [ + { name = "pykerberos", marker = "os_name != 'nt'" }, + { name = "winkerberos", marker = "os_name == 'nt'" }, +] +ocsp = [ + { name = "certifi", marker = "os_name == 'nt' or sys_platform == 'darwin'" }, + { name = "cryptography" }, + { name = "pyopenssl" }, + { name = "requests" }, + { name = "service-identity" }, +] +snappy = [ + { name = "python-snappy" }, +] +test = [ + { name = "pytest" }, + { name = "pytest-asyncio" }, +] +zstd = [ + { name = "zstandard" }, +] + +[package.dev-dependencies] +coverage = [ + { name = "coverage" }, + { name = "pytest-cov" }, +] +dev = [ + { name = "pre-commit" }, +] +eventlet = [ + { name = "eventlet" }, +] +gevent = [ + { name = "gevent" }, +] +mockupdb = [ + { name = "mockupdb" }, +] +perf = [ + { name = "simplejson" }, +] +pymongocrypt-source = [ + { name = "pymongocrypt" }, +] +typing = [ + { name = "mypy" }, + { name = "pip" }, + { name = "pyright" }, + { name = "typing-extensions" }, +] + +[package.metadata] +requires-dist = [ + { name = "certifi", marker = "(os_name == 'nt' and extra == 'encryption') or (sys_platform == 'darwin' and extra == 'encryption')" }, + { name = "certifi", marker = "(os_name == 'nt' and extra == 'ocsp') or (sys_platform == 'darwin' and extra == 'ocsp')" }, + { name = "cryptography", marker = "extra == 'ocsp'", specifier = ">=2.5" }, + { name = "dnspython", specifier = ">=1.16.0,<3.0.0" }, + { name = "furo", marker = "extra == 'docs'", specifier = "==2024.8.6" }, + { name = "pykerberos", marker = "os_name != 'nt' and extra == 'gssapi'" }, + { name = "pymongo-auth-aws", marker = "extra == 'aws'", specifier = ">=1.1.0,<2.0.0" }, + { name = "pymongo-auth-aws", marker = "extra == 'encryption'", specifier = ">=1.1.0,<2.0.0" }, + { name = "pymongocrypt", marker = "extra == 'encryption'", specifier = ">=1.12.0,<2.0.0" }, + { name = "pyopenssl", marker = "extra == 
'ocsp'", specifier = ">=17.2.0" }, + { name = "pytest", marker = "extra == 'test'", specifier = ">=8.2" }, + { name = "pytest-asyncio", marker = "extra == 'test'", specifier = ">=0.24.0" }, + { name = "python-snappy", marker = "extra == 'snappy'" }, + { name = "readthedocs-sphinx-search", marker = "extra == 'docs'", specifier = "~=0.3" }, + { name = "requests", marker = "extra == 'ocsp'", specifier = "<3.0.0" }, + { name = "service-identity", marker = "extra == 'ocsp'", specifier = ">=18.1.0" }, + { name = "sphinx", marker = "extra == 'docs'", specifier = ">=5.3,<9" }, + { name = "sphinx-autobuild", marker = "extra == 'docs'", specifier = ">=2020.9.1" }, + { name = "sphinx-rtd-theme", marker = "extra == 'docs'", specifier = ">=2,<4" }, + { name = "sphinxcontrib-shellcheck", marker = "extra == 'docs'", specifier = ">=1,<2" }, + { name = "winkerberos", marker = "os_name == 'nt' and extra == 'gssapi'", specifier = ">=0.5.0" }, + { name = "zstandard", marker = "extra == 'zstd'" }, +] + +[package.metadata.requires-dev] +coverage = [ + { name = "coverage", specifier = ">=5,<=7.5" }, + { name = "pytest-cov" }, +] +dev = [{ name = "pre-commit", specifier = ">=4.0" }] +eventlet = [{ name = "eventlet" }] +gevent = [{ name = "gevent" }] +mockupdb = [{ name = "mockupdb", git = "https://github.com/mongodb-labs/mongo-mockup-db?rev=master" }] +perf = [{ name = "simplejson" }] +pymongocrypt-source = [{ name = "pymongocrypt", git = "https://github.com/mongodb/libmongocrypt?subdirectory=bindings%2Fpython&rev=master" }] +typing = [ + { name = "mypy", specifier = "==1.14.1" }, + { name = "pip" }, + { name = "pyright", specifier = "==1.1.392.post0" }, + { name = "typing-extensions" }, +] + +[[package]] +name = "pymongo-auth-aws" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "boto3" }, + { name = "botocore" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c7/37/ca8d840f322f0047b71afcec7a489b1ea1f59a5f6d29f91ad8004024736f/pymongo_auth_aws-1.3.0.tar.gz", hash = "sha256:d0fa893958dc525ca29f601c34f2ca73c860f66bc6511ec0a7da6eb7ea44e94f", size = 18559 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/12/a997fc108416f31fac55748e5406c1c8c4e976a4073f07b5553825641611/pymongo_auth_aws-1.3.0-py3-none-any.whl", hash = "sha256:367f6d853da428a02e9e450422756133715d40f8141f47ae5d98f139a88c0ce5", size = 15470 }, +] + +[[package]] +name = "pymongocrypt" +version = "1.13.0.dev0" +source = { git = "https://github.com/mongodb/libmongocrypt?subdirectory=bindings%2Fpython&rev=master#90476d5db7737bab2ce1c198df5671a12dbaae1a" } +dependencies = [ + { name = "cffi" }, + { name = "cryptography" }, + { name = "httpx" }, + { name = "packaging" }, +] + +[[package]] +name = "pyopenssl" +version = "25.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cryptography" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9f/26/e25b4a374b4639e0c235527bbe31c0524f26eda701d79456a7e1877f4cc5/pyopenssl-25.0.0.tar.gz", hash = "sha256:cd2cef799efa3936bb08e8ccb9433a575722b9dd986023f1cabc4ae64e9dac16", size = 179573 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ca/d7/eb76863d2060dcbe7c7e6cccfd95ac02ea0b9acc37745a0d99ff6457aefb/pyOpenSSL-25.0.0-py3-none-any.whl", hash = "sha256:424c247065e46e76a37411b9ab1782541c23bb658bf003772c3405fbaa128e90", size = 56453 }, +] + +[[package]] +name = "pyright" +version = "1.1.392.post0" +source = { 
registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nodeenv" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/df/3c6f6b08fba7ccf49b114dfc4bb33e25c299883fd763f93fad47ef8bc58d/pyright-1.1.392.post0.tar.gz", hash = "sha256:3b7f88de74a28dcfa90c7d90c782b6569a48c2be5f9d4add38472bdaac247ebd", size = 3789911 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e7/b1/a18de17f40e4f61ca58856b9ef9b0febf74ff88978c3f7776f910071f567/pyright-1.1.392.post0-py3-none-any.whl", hash = "sha256:252f84458a46fa2f0fd4e2f91fc74f50b9ca52c757062e93f6c250c0d8329eb2", size = 5595487 }, +] + +[[package]] +name = "pytest" +version = "8.3.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/05/35/30e0d83068951d90a01852cb1cef56e5d8a09d20c7f511634cc2f7e0372a/pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761", size = 1445919 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/11/92/76a1c94d3afee238333bc0a42b82935dd8f9cf8ce9e336ff87ee14d9e1cf/pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6", size = 343083 }, +] + +[[package]] +name = "pytest-asyncio" +version = "0.25.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/72/df/adcc0d60f1053d74717d21d58c0048479e9cab51464ce0d2965b086bd0e2/pytest_asyncio-0.25.2.tar.gz", hash = "sha256:3f8ef9a98f45948ea91a0ed3dc4268b5326c0e7bce73892acc654df4262ad45f", size = 53950 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/61/d8/defa05ae50dcd6019a95527200d3b3980043df5aa445d40cb0ef9f7f98ab/pytest_asyncio-0.25.2-py3-none-any.whl", hash = "sha256:0d0bb693f7b99da304a0634afc0a4b19e49d5e0de2d670f38dc4bfa5727c5075", size = 19400 }, +] + +[[package]] +name = "pytest-cov" +version = "6.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "coverage", extra = ["toml"] }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/be/45/9b538de8cef30e17c7b45ef42f538a94889ed6a16f2387a6c89e73220651/pytest-cov-6.0.0.tar.gz", hash = "sha256:fde0b595ca248bb8e2d76f020b465f3b107c9632e6a1d1705f17834c89dcadc0", size = 66945 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/36/3b/48e79f2cd6a61dbbd4807b4ed46cb564b4fd50a76166b1c4ea5c1d9e2371/pytest_cov-6.0.0-py3-none-any.whl", hash = "sha256:eee6f1b9e61008bd34975a4d5bab25801eb31898b032dd55addc93e96fcaaa35", size = 22949 }, +] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = 
"sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 }, +] + +[[package]] +name = "python-snappy" +version = "0.7.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cramjam" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/39/66/9185fbb6605ba92716d9f77fbb13c97eb671cd13c3ad56bd154016fbf08b/python_snappy-0.7.3.tar.gz", hash = "sha256:40216c1badfb2d38ac781ecb162a1d0ec40f8ee9747e610bcfefdfa79486cee3", size = 9337 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/86/c1/0ee413ddd639aebf22c85d6db39f136ccc10e6a4b4dd275a92b5c839de8d/python_snappy-0.7.3-py3-none-any.whl", hash = "sha256:074c0636cfcd97e7251330f428064050ac81a52c62ed884fc2ddebbb60ed7f50", size = 9155 }, +] + +[[package]] +name = "pyyaml" +version = "6.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9b/95/a3fac87cb7158e231b5a6012e438c647e1a87f09f8e0d123acec8ab8bf71/PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086", size = 184199 }, + { url = "https://files.pythonhosted.org/packages/c7/7a/68bd47624dab8fd4afbfd3c48e3b79efe09098ae941de5b58abcbadff5cb/PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf", size = 171758 }, + { url = "https://files.pythonhosted.org/packages/49/ee/14c54df452143b9ee9f0f29074d7ca5516a36edb0b4cc40c3f280131656f/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237", size = 718463 }, + { url = "https://files.pythonhosted.org/packages/4d/61/de363a97476e766574650d742205be468921a7b532aa2499fcd886b62530/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b", size = 719280 }, + { url = "https://files.pythonhosted.org/packages/6b/4e/1523cb902fd98355e2e9ea5e5eb237cbc5f3ad5f3075fa65087aa0ecb669/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed", size = 751239 }, + { url = "https://files.pythonhosted.org/packages/b7/33/5504b3a9a4464893c32f118a9cc045190a91637b119a9c881da1cf6b7a72/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180", size = 695802 }, + { url = "https://files.pythonhosted.org/packages/5c/20/8347dcabd41ef3a3cdc4f7b7a2aff3d06598c8779faa189cdbf878b626a4/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68", size = 720527 }, + { url = "https://files.pythonhosted.org/packages/be/aa/5afe99233fb360d0ff37377145a949ae258aaab831bde4792b32650a4378/PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99", size = 144052 }, + { url = "https://files.pythonhosted.org/packages/b5/84/0fa4b06f6d6c958d207620fc60005e241ecedceee58931bb20138e1e5776/PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = 
"sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e", size = 161774 }, + { url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612 }, + { url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040 }, + { url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829 }, + { url = "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167 }, + { url = "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952 }, + { url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301 }, + { url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638 }, + { url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850 }, + { url = "https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980 }, + { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873 }, + { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302 }, + { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154 }, + { url = 
"https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223 }, + { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542 }, + { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164 }, + { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611 }, + { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591 }, + { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338 }, + { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309 }, + { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679 }, + { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428 }, + { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361 }, + { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523 }, + { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660 }, + { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size 
= 751597 }, + { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527 }, + { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446 }, + { url = "https://files.pythonhosted.org/packages/65/d8/b7a1db13636d7fb7d4ff431593c510c8b8fca920ade06ca8ef20015493c5/PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d", size = 184777 }, + { url = "https://files.pythonhosted.org/packages/0a/02/6ec546cd45143fdf9840b2c6be8d875116a64076218b61d68e12548e5839/PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f", size = 172318 }, + { url = "https://files.pythonhosted.org/packages/0e/9a/8cc68be846c972bda34f6c2a93abb644fb2476f4dcc924d52175786932c9/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290", size = 720891 }, + { url = "https://files.pythonhosted.org/packages/e9/6c/6e1b7f40181bc4805e2e07f4abc10a88ce4648e7e95ff1abe4ae4014a9b2/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12", size = 722614 }, + { url = "https://files.pythonhosted.org/packages/3d/32/e7bd8535d22ea2874cef6a81021ba019474ace0d13a4819c2a4bce79bd6a/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19", size = 737360 }, + { url = "https://files.pythonhosted.org/packages/d7/12/7322c1e30b9be969670b672573d45479edef72c9a0deac3bb2868f5d7469/PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e", size = 699006 }, + { url = "https://files.pythonhosted.org/packages/82/72/04fcad41ca56491995076630c3ec1e834be241664c0c09a64c9a2589b507/PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725", size = 723577 }, + { url = "https://files.pythonhosted.org/packages/ed/5e/46168b1f2757f1fcd442bc3029cd8767d88a98c9c05770d8b420948743bb/PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631", size = 144593 }, + { url = "https://files.pythonhosted.org/packages/19/87/5124b1c1f2412bb95c59ec481eaf936cd32f0fe2a7b16b97b81c4c017a6a/PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8", size = 162312 }, +] + +[[package]] +name = "readthedocs-sphinx-search" +version = "0.3.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8f/96/0c51439e3dbc634cf5328ffb173ff759b7fc9abf3276e78bf71d9fc0aa51/readthedocs-sphinx-search-0.3.2.tar.gz", hash = "sha256:277773bfa28566a86694c08e568d5a648cd80f22826545555a764d6d20c365fb", size = 21949 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/3c/41bc9d7d4d936a73e380423f23996bee1691e17598d8a03c062be6aac640/readthedocs_sphinx_search-0.3.2-py3-none-any.whl", hash = 
"sha256:58716fd21f01581e6e67bf3bc02e79c77e10dc58b5f8e4c7cc1977e013eda173", size = 21379 }, +] + +[[package]] +name = "requests" +version = "2.32.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3", version = "1.26.20", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "urllib3", version = "2.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928 }, +] + +[[package]] +name = "s3transfer" +version = "0.11.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1a/aa/fdd958c626b00e3f046d4004363e7f1a2aba4354f78d65ceb3b217fa5eb8/s3transfer-0.11.1.tar.gz", hash = "sha256:3f25c900a367c8b7f7d8f9c34edc87e300bde424f779dc9f0a8ae4f9df9264f6", size = 146952 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/ce/22673f4a85ccc640735b4f8d12178a0f41b5d3c6eda7f33756d10ce56901/s3transfer-0.11.1-py3-none-any.whl", hash = "sha256:8fa0aa48177be1f3425176dfe1ab85dcd3d962df603c3dbfc585e6bf857ef0ff", size = 84111 }, +] + +[[package]] +name = "service-identity" +version = "24.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "cryptography" }, + { name = "pyasn1" }, + { name = "pyasn1-modules" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/07/a5/dfc752b979067947261dbbf2543470c58efe735c3c1301dd870ef27830ee/service_identity-24.2.0.tar.gz", hash = "sha256:b8683ba13f0d39c6cd5d625d2c5f65421d6d707b013b375c355751557cbe8e09", size = 39245 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/08/2c/ca6dd598b384bc1ce581e24aaae0f2bed4ccac57749d5c3befbb5e742081/service_identity-24.2.0-py3-none-any.whl", hash = "sha256:6b047fbd8a84fd0bb0d55ebce4031e400562b9196e1e0d3e0fe2b8a59f6d4a85", size = 11364 }, +] + +[[package]] +name = "setuptools" +version = "75.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/92/ec/089608b791d210aec4e7f97488e67ab0d33add3efccb83a056cbafe3a2a6/setuptools-75.8.0.tar.gz", hash = "sha256:c5afc8f407c626b8313a86e10311dd3f661c6cd9c09d4bf8c15c0e11f9f2b0e6", size = 1343222 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/8a/b9dc7678803429e4a3bc9ba462fa3dd9066824d3c607490235c6a796be5a/setuptools-75.8.0-py3-none-any.whl", hash = "sha256:e3982f444617239225d675215d51f6ba05f845d4eec313da4418fdbb56fb27e3", size = 1228782 }, +] + +[[package]] +name = "simplejson" +version = "3.19.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3d/29/085111f19717f865eceaf0d4397bf3e76b08d60428b076b64e2a1903706d/simplejson-3.19.3.tar.gz", hash = "sha256:8e086896c36210ab6050f2f9f095a5f1e03c83fa0e7f296d6cba425411364680", size = 85237 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/39/24/260ad03435ce8ef2436031951134659c7161776ec3a78094b35b9375ceea/simplejson-3.19.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:50d8b742d74c449c4dcac570d08ce0f21f6a149d2d9cf7652dbf2ba9a1bc729a", size = 93660 }, + { url = "https://files.pythonhosted.org/packages/63/a1/dee207f357bcd6b106f2ca5129ee916c24993ba08b7dfbf9a37c22442ea9/simplejson-3.19.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd011fc3c1d88b779645495fdb8189fb318a26981eebcce14109460e062f209b", size = 75546 }, + { url = "https://files.pythonhosted.org/packages/80/7b/45ef1da43f54d209ce2ef59b7356cda13f810186c381f38ae23a4d2b1337/simplejson-3.19.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:637c4d4b81825c1f4d651e56210bd35b5604034b192b02d2d8f17f7ce8c18f42", size = 75602 }, + { url = "https://files.pythonhosted.org/packages/7f/4b/9a132382982f8127bc7ce5212a5585d83c174707c9dd698d0cb6a0d41882/simplejson-3.19.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f56eb03bc9e432bb81adc8ecff2486d39feb371abb442964ffb44f6db23b332", size = 138632 }, + { url = "https://files.pythonhosted.org/packages/76/37/012f5ad2f38afa28f8a6ad9da01dc0b64492ffbaf2a3f2f8a0e1fddf9c1d/simplejson-3.19.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ef59a53be400c1fad2c914b8d74c9d42384fed5174f9321dd021b7017fd40270", size = 146740 }, + { url = "https://files.pythonhosted.org/packages/69/b3/89640bd676e26ea2315b5aaf80712a6fbbb4338e4caf872d91448502a19b/simplejson-3.19.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72e8abbc86fcac83629a030888b45fed3a404d54161118be52cb491cd6975d3e", size = 134440 }, + { url = "https://files.pythonhosted.org/packages/61/20/0035a288deaff05397d6cc0145b33f3dd2429b99cdc880de4c5eca41ca72/simplejson-3.19.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8efb03ca77bd7725dfacc9254df00d73e6f43013cf39bd37ef1a8ed0ebb5165", size = 137949 }, + { url = "https://files.pythonhosted.org/packages/5d/de/5b03fafe3003e32d179588953d38183af6c3747e95c7dcc668c4f9eb886a/simplejson-3.19.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:add8850db04b98507a8b62d248a326ecc8561e6d24336d1ca5c605bbfaab4cad", size = 139992 }, + { url = "https://files.pythonhosted.org/packages/d1/ce/e493116ff49fd215f7baa25195b8f684c91e65c153e2a57e04dc3f3a466b/simplejson-3.19.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fc3dc9fb413fc34c396f52f4c87de18d0bd5023804afa8ab5cc224deeb6a9900", size = 140320 }, + { url = "https://files.pythonhosted.org/packages/86/f3/a18b98a7a27548829f672754dd3940fb637a27981399838128d3e560087f/simplejson-3.19.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4dfa420bb9225dd33b6efdabde7c6a671b51150b9b1d9c4e5cd74d3b420b3fe1", size = 148625 }, + { url = "https://files.pythonhosted.org/packages/0f/55/d3da33ee3e708133da079b9d537693d7fef281e6f0d27921cc7e5b3ec523/simplejson-3.19.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7b5c472099b39b274dcde27f1113db8d818c9aa3ba8f78cbb8ad04a4c1ac2118", size = 141287 }, + { url = "https://files.pythonhosted.org/packages/17/e8/56184ab4d66bb64a6ff569f069b3796dfd943f9b961268fe0d403526fc17/simplejson-3.19.3-cp310-cp310-win32.whl", hash = "sha256:817abad79241ed4a507b3caf4d3f2be5079f39d35d4c550a061988986bffd2ec", size = 74143 }, + { url = 
"https://files.pythonhosted.org/packages/be/8f/a0089eff060f10a925f08b0a0f50854321484f1ac54b1895bbf4c9213dfe/simplejson-3.19.3-cp310-cp310-win_amd64.whl", hash = "sha256:dd5b9b1783e14803e362a558680d88939e830db2466f3fa22df5c9319f8eea94", size = 75643 }, + { url = "https://files.pythonhosted.org/packages/8c/bb/9ee3959e6929d228cf669b3f13f0edd43c5261b6cd69598640748b19ca35/simplejson-3.19.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e88abff510dcff903a18d11c2a75f9964e768d99c8d147839913886144b2065e", size = 91930 }, + { url = "https://files.pythonhosted.org/packages/ac/ae/a06523928af3a6783e2638cd4f6035c3e32de1c1063d563d9060c8d2f1ad/simplejson-3.19.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:934a50a614fb831614db5dbfba35127ee277624dda4d15895c957d2f5d48610c", size = 74787 }, + { url = "https://files.pythonhosted.org/packages/c3/58/fea732e48a7540035fe46d39e6fd77679f5810311d31da8661ce7a18210a/simplejson-3.19.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:212fce86a22188b0c7f53533b0f693ea9605c1a0f02c84c475a30616f55a744d", size = 74612 }, + { url = "https://files.pythonhosted.org/packages/ab/4d/15718f20cb0e3875b8af9597d6bb3bfbcf1383834b82b6385ee9ac0b72a9/simplejson-3.19.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d9e8f836688a8fabe6a6b41b334aa550a6823f7b4ac3d3712fc0ad8655be9a8", size = 143550 }, + { url = "https://files.pythonhosted.org/packages/93/44/815a4343774760f7a82459c8f6a4d8268b4b6d23f81e7b922a5e2ca79171/simplejson-3.19.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:23228037dc5d41c36666384062904d74409a62f52283d9858fa12f4c22cffad1", size = 153284 }, + { url = "https://files.pythonhosted.org/packages/9d/52/d3202d9bba95444090d1c98e43da3c10907875babf63ed3c134d1b9437e3/simplejson-3.19.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0791f64fed7d4abad639491f8a6b1ba56d3c604eb94b50f8697359b92d983f36", size = 141518 }, + { url = "https://files.pythonhosted.org/packages/b7/d4/850948bcbcfe0b4a6c69dfde10e245d3a1ea45252f16a1e2308a3b06b1da/simplejson-3.19.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4f614581b61a26fbbba232a1391f6cee82bc26f2abbb6a0b44a9bba25c56a1c", size = 144688 }, + { url = "https://files.pythonhosted.org/packages/58/d2/b8dcb0a07d9cd54c47f9fe8733dbb83891d1efe4fc786d9dfc8781cc04f9/simplejson-3.19.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1df0aaf1cb787fdf34484ed4a1f0c545efd8811f6028623290fef1a53694e597", size = 144534 }, + { url = "https://files.pythonhosted.org/packages/a9/95/1e92d99039041f596e0923ec4f9153244acaf3830944dc69a7c11b23ceaa/simplejson-3.19.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:951095be8d4451a7182403354c22ec2de3e513e0cc40408b689af08d02611588", size = 146565 }, + { url = "https://files.pythonhosted.org/packages/21/04/c96aeb3a74031255e4cbcc0ca1b6ebfb5549902f0a065f06d65ce8447c0c/simplejson-3.19.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2a954b30810988feeabde843e3263bf187697e0eb5037396276db3612434049b", size = 155014 }, + { url = "https://files.pythonhosted.org/packages/b7/41/e28a28593afc4a75d8999d057bfb7c73a103e35f927e66f4bb92571787ae/simplejson-3.19.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c40df31a75de98db2cdfead6074d4449cd009e79f54c1ebe5e5f1f153c68ad20", size = 148092 }, + { url = 
"https://files.pythonhosted.org/packages/2b/82/1c81a3af06f937afb6d2e9d74a465c0e0ae6db444d1bf2a436ea26de1965/simplejson-3.19.3-cp311-cp311-win32.whl", hash = "sha256:7e2a098c21ad8924076a12b6c178965d88a0ad75d1de67e1afa0a66878f277a5", size = 73942 }, + { url = "https://files.pythonhosted.org/packages/65/be/d8ab9717f471be3c114f16abd8be21d9a6a0a09b9b49177d93d64d3717d9/simplejson-3.19.3-cp311-cp311-win_amd64.whl", hash = "sha256:c9bedebdc5fdad48af8783022bae307746d54006b783007d1d3c38e10872a2c6", size = 75469 }, + { url = "https://files.pythonhosted.org/packages/20/15/513fea93fafbdd4993eacfcb762965b2ff3d29e618c029e2956174d68c4b/simplejson-3.19.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:66a0399e21c2112acacfebf3d832ebe2884f823b1c7e6d1363f2944f1db31a99", size = 92921 }, + { url = "https://files.pythonhosted.org/packages/a4/4f/998a907ae1a6c104dc0ee48aa248c2478490152808d34d8e07af57f396c3/simplejson-3.19.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6ef9383c5e05f445be60f1735c1816163c874c0b1ede8bb4390aff2ced34f333", size = 75311 }, + { url = "https://files.pythonhosted.org/packages/db/44/acd6122201e927451869d45952b9ab1d3025cdb5e61548d286d08fbccc08/simplejson-3.19.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:42e5acf80d4d971238d4df97811286a044d720693092b20a56d5e56b7dcc5d09", size = 74964 }, + { url = "https://files.pythonhosted.org/packages/27/ca/d0a1e8f16e1bbdc0b8c6d88166f45f565ed7285f53928cfef3b6ce78f14d/simplejson-3.19.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0b0efc7279d768db7c74d3d07f0b5c81280d16ae3fb14e9081dc903e8360771", size = 150106 }, + { url = "https://files.pythonhosted.org/packages/63/59/0554b78cf26c98e2b9cae3f44723bd72c2394e2afec1a14eedc6211f7187/simplejson-3.19.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0552eb06e7234da892e1d02365cd2b7b2b1f8233aa5aabdb2981587b7cc92ea0", size = 158347 }, + { url = "https://files.pythonhosted.org/packages/b2/fe/9f30890352e431e8508cc569912d3322147d3e7e4f321e48c0adfcb4c97d/simplejson-3.19.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5bf6a3b9a7d7191471b464fe38f684df10eb491ec9ea454003edb45a011ab187", size = 148456 }, + { url = "https://files.pythonhosted.org/packages/37/e3/663a09542ee021d4131162f7a164cb2e7f04ef48433a67591738afbf12ea/simplejson-3.19.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7017329ca8d4dca94ad5e59f496e5fc77630aecfc39df381ffc1d37fb6b25832", size = 152190 }, + { url = "https://files.pythonhosted.org/packages/31/20/4e0c4d35e10ff6465003bec304316d822a559a1c38c66ef6892ca199c207/simplejson-3.19.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:67a20641afebf4cfbcff50061f07daad1eace6e7b31d7622b6fa2c40d43900ba", size = 149846 }, + { url = "https://files.pythonhosted.org/packages/08/7a/46e2e072cac3987cbb05946f25167f0ad2fe536748e7405953fd6661a486/simplejson-3.19.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:dd6a7dabcc4c32daf601bc45e01b79175dde4b52548becea4f9545b0a4428169", size = 151714 }, + { url = "https://files.pythonhosted.org/packages/7f/7d/dbeeac10eb61d5d8858d0bb51121a21050d281dc83af4c557f86da28746c/simplejson-3.19.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:08f9b443a94e72dd02c87098c96886d35790e79e46b24e67accafbf13b73d43b", size = 158777 }, + { url = 
"https://files.pythonhosted.org/packages/fc/8f/a98bdbb799c6a4a884b5823db31785a96ba895b4b0f4d8ac345d6fe98bbf/simplejson-3.19.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fa97278ae6614346b5ca41a45a911f37a3261b57dbe4a00602048652c862c28b", size = 154230 }, + { url = "https://files.pythonhosted.org/packages/b1/db/852eebceb85f969ae40e06babed1a93d3bacb536f187d7a80ff5823a5979/simplejson-3.19.3-cp312-cp312-win32.whl", hash = "sha256:ef28c3b328d29b5e2756903aed888960bc5df39b4c2eab157ae212f70ed5bf74", size = 74002 }, + { url = "https://files.pythonhosted.org/packages/fe/68/9f0e5df0651cb79ef83cba1378765a00ee8038e6201cc82b8e7178a7778e/simplejson-3.19.3-cp312-cp312-win_amd64.whl", hash = "sha256:1e662336db50ad665777e6548b5076329a94a0c3d4a0472971c588b3ef27de3a", size = 75596 }, + { url = "https://files.pythonhosted.org/packages/93/3a/5896821ed543899fcb9c4256c7e71bb110048047349a00f42bc8b8fb379f/simplejson-3.19.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:0959e6cb62e3994b5a40e31047ff97ef5c4138875fae31659bead691bed55896", size = 92931 }, + { url = "https://files.pythonhosted.org/packages/39/15/5d33d269440912ee40d856db0c8be2b91aba7a219690ab01f86cb0edd590/simplejson-3.19.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7a7bfad839c624e139a4863007233a3f194e7c51551081f9789cba52e4da5167", size = 75318 }, + { url = "https://files.pythonhosted.org/packages/2a/8d/2e7483a2bf7ec53acf7e012bafbda79d7b34f90471dda8e424544a59d484/simplejson-3.19.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afab2f7f2486a866ff04d6d905e9386ca6a231379181a3838abce1f32fbdcc37", size = 74971 }, + { url = "https://files.pythonhosted.org/packages/4d/9d/9bdf34437c8834a7cf7246f85e9d5122e30579f512c10a0c2560e994294f/simplejson-3.19.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d00313681015ac498e1736b304446ee6d1c72c5b287cd196996dad84369998f7", size = 150112 }, + { url = "https://files.pythonhosted.org/packages/a7/e2/1f2ae2d89eaf85f6163c82150180aae5eaa18085cfaf892f8a57d4c51cbd/simplejson-3.19.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d936ae682d5b878af9d9eb4d8bb1fdd5e41275c8eb59ceddb0aeed857bb264a2", size = 158354 }, + { url = "https://files.pythonhosted.org/packages/60/83/26f610adf234c8492b3f30501e12f2271e67790f946c6898fe0c58aefe99/simplejson-3.19.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01c6657485393f2e9b8177c77a7634f13ebe70d5e6de150aae1677d91516ce6b", size = 148455 }, + { url = "https://files.pythonhosted.org/packages/b5/4b/109af50006af77133653c55b5b91b4bd2d579ff8254ce11216c0b75f911b/simplejson-3.19.3-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a6a750d3c7461b1c47cfc6bba8d9e57a455e7c5f80057d2a82f738040dd1129", size = 152191 }, + { url = "https://files.pythonhosted.org/packages/75/dc/108872a8825cbd99ae6f4334e0490ff1580367baf12198bcaf988f6820ba/simplejson-3.19.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ea7a4a998c87c5674a27089e022110a1a08a7753f21af3baf09efe9915c23c3c", size = 149954 }, + { url = "https://files.pythonhosted.org/packages/eb/be/deec1d947a5d0472276ab4a4d1a9378dc5ee27f3dc9e54d4f62ffbad7a08/simplejson-3.19.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:6300680d83a399be2b8f3b0ef7ef90b35d2a29fe6e9c21438097e0938bbc1564", size = 151812 }, + { url = 
"https://files.pythonhosted.org/packages/e9/58/4ee130702d36b1551ef66e7587eefe56651f3669255bf748cd71691e2434/simplejson-3.19.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:ab69f811a660c362651ae395eba8ce84f84c944cea0df5718ea0ba9d1e4e7252", size = 158880 }, + { url = "https://files.pythonhosted.org/packages/0f/e1/59cc6a371b60f89e3498d9f4c8109f6b7359094d453f5fe80b2677b777b0/simplejson-3.19.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:256e09d0f94d9c3d177d9e95fd27a68c875a4baa2046633df387b86b652f5747", size = 154344 }, + { url = "https://files.pythonhosted.org/packages/79/45/1b36044670016f5cb25ebd92497427d2d1711ecb454d00f71eb9a00b77cc/simplejson-3.19.3-cp313-cp313-win32.whl", hash = "sha256:2c78293470313aefa9cfc5e3f75ca0635721fb016fb1121c1c5b0cb8cc74712a", size = 74002 }, + { url = "https://files.pythonhosted.org/packages/e2/58/b06226e6b0612f2b1fa13d5273551da259f894566b1eef32249ddfdcce44/simplejson-3.19.3-cp313-cp313-win_amd64.whl", hash = "sha256:3bbcdc438dc1683b35f7a8dc100960c721f922f9ede8127f63bed7dfded4c64c", size = 75599 }, + { url = "https://files.pythonhosted.org/packages/9a/3d/e7f1caf7fa8c004c30e2c0595a22646a178344a7f53924c11c3d263a8623/simplejson-3.19.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b5587feda2b65a79da985ae6d116daf6428bf7489992badc29fc96d16cd27b05", size = 93646 }, + { url = "https://files.pythonhosted.org/packages/01/40/ff5cae1b4ff35c7822456ad7d098371d697479d418194064b8aff8142d70/simplejson-3.19.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e0d2b00ecbcd1a3c5ea1abc8bb99a26508f758c1759fd01c3be482a3655a176f", size = 75544 }, + { url = "https://files.pythonhosted.org/packages/56/a8/dbe799f3620a08337ff5f3be27df7b5ba5beb1ee06acaf75f3cb46f8d650/simplejson-3.19.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:32a3ada8f3ea41db35e6d37b86dade03760f804628ec22e4fe775b703d567426", size = 75593 }, + { url = "https://files.pythonhosted.org/packages/d5/53/6ed299b9201ea914bb6a178a7e65413ed1969981533f50bfbe8a215be98f/simplejson-3.19.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f455672f4738b0f47183c5896e3606cd65c9ddee3805a4d18e8c96aa3f47c84", size = 138077 }, + { url = "https://files.pythonhosted.org/packages/1c/73/14306559157a6faedb4ecae28ad907b64b5359be5c9ec79233546acb96a4/simplejson-3.19.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2b737a5fefedb8333fa50b8db3dcc9b1d18fd6c598f89fa7debff8b46bf4e511", size = 146307 }, + { url = "https://files.pythonhosted.org/packages/5b/1a/7994abb33e53ec972dd5e6dbb337b9070d3ad96017c4cff9d5dc83678ad4/simplejson-3.19.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb47ee773ce67476a960e2db4a0a906680c54f662521550828c0cc57d0099426", size = 133922 }, + { url = "https://files.pythonhosted.org/packages/08/15/8b4e1a8c7729b37797d0eab1381f517f928bd323d17efa7f4414c3565e1f/simplejson-3.19.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eed8cd98a7b24861da9d3d937f5fbfb6657350c547528a117297fe49e3960667", size = 137367 }, + { url = "https://files.pythonhosted.org/packages/59/9a/f5b786fe611395564d3e84f58f668242a7a2e674b4fac71b4e6b21d6d2b7/simplejson-3.19.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:619756f1dd634b5bdf57d9a3914300526c3b348188a765e45b8b08eabef0c94e", size = 139513 }, + { url = 
"https://files.pythonhosted.org/packages/4d/87/c310daf5e2f10306de3720f075f8ed74cbe83396879b8c55e832393233a5/simplejson-3.19.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:dd7230d061e755d60a4d5445bae854afe33444cdb182f3815cff26ac9fb29a15", size = 139749 }, + { url = "https://files.pythonhosted.org/packages/fd/89/690880e1639b421a919d36fadf1fc364a38c3bc4f208dc11627426cdbe98/simplejson-3.19.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:101a3c8392028cd704a93c7cba8926594e775ca3c91e0bee82144e34190903f1", size = 148103 }, + { url = "https://files.pythonhosted.org/packages/a3/31/ef13eda5b5a0d8d9555b70151ee2956f63b845e1fac4ff904339dfb4dd89/simplejson-3.19.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1e557712fc79f251673aeb3fad3501d7d4da3a27eff0857af2e1d1afbbcf6685", size = 140740 }, + { url = "https://files.pythonhosted.org/packages/39/5f/26b0a036592e45a2cb4be2f53d8827257e169bd5c84744a1aac89b0ff56f/simplejson-3.19.3-cp39-cp39-win32.whl", hash = "sha256:0bc5544e3128891bf613b9f71813ee2ec9c11574806f74dd8bb84e5e95bf64a2", size = 74115 }, + { url = "https://files.pythonhosted.org/packages/32/06/a35e2e1d8850aff1cf1320d4887bd5f97921c8964a1e260983d38d5d6c17/simplejson-3.19.3-cp39-cp39-win_amd64.whl", hash = "sha256:06662392e4913dc8846d6a71a6d5de86db5fba244831abe1dd741d62a4136764", size = 75636 }, + { url = "https://files.pythonhosted.org/packages/0d/e7/f9fafbd4f39793a20cc52e77bbd766f7384312526d402c382928dc7667f6/simplejson-3.19.3-py3-none-any.whl", hash = "sha256:49cc4c7b940d43bd12bf87ec63f28cbc4964fc4e12c031cc8cd01650f43eb94e", size = 57004 }, +] + +[[package]] +name = "six" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050 }, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 }, +] + +[[package]] +name = "snowballstemmer" +version = "2.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/44/7b/af302bebf22c749c56c9c3e8ae13190b5b5db37a33d9068652e8f73b7089/snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1", size = 86699 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ed/dc/c02e01294f7265e63a7315fe086dd1df7dacb9f840a804da846b96d01b96/snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a", size = 93002 }, +] + +[[package]] +name = "soupsieve" +version = "2.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url 
= "https://files.pythonhosted.org/packages/d7/ce/fbaeed4f9fb8b2daa961f90591662df6a86c1abf25c548329a86920aedfb/soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb", size = 101569 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/c2/fe97d779f3ef3b15f05c94a2f1e3d21732574ed441687474db9d342a7315/soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9", size = 36186 }, +] + +[[package]] +name = "sphinx" +version = "7.4.7" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] +dependencies = [ + { name = "alabaster", version = "0.7.16", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "babel", marker = "python_full_version < '3.10'" }, + { name = "colorama", marker = "python_full_version < '3.10' and sys_platform == 'win32'" }, + { name = "docutils", marker = "python_full_version < '3.10'" }, + { name = "imagesize", marker = "python_full_version < '3.10'" }, + { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, + { name = "jinja2", marker = "python_full_version < '3.10'" }, + { name = "packaging", marker = "python_full_version < '3.10'" }, + { name = "pygments", marker = "python_full_version < '3.10'" }, + { name = "requests", marker = "python_full_version < '3.10'" }, + { name = "snowballstemmer", marker = "python_full_version < '3.10'" }, + { name = "sphinxcontrib-applehelp", marker = "python_full_version < '3.10'" }, + { name = "sphinxcontrib-devhelp", marker = "python_full_version < '3.10'" }, + { name = "sphinxcontrib-htmlhelp", marker = "python_full_version < '3.10'" }, + { name = "sphinxcontrib-jsmath", marker = "python_full_version < '3.10'" }, + { name = "sphinxcontrib-qthelp", marker = "python_full_version < '3.10'" }, + { name = "sphinxcontrib-serializinghtml", marker = "python_full_version < '3.10'" }, + { name = "tomli", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5b/be/50e50cb4f2eff47df05673d361095cafd95521d2a22521b920c67a372dcb/sphinx-7.4.7.tar.gz", hash = "sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe", size = 8067911 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0d/ef/153f6803c5d5f8917dbb7f7fcf6d34a871ede3296fa89c2c703f5f8a6c8e/sphinx-7.4.7-py3-none-any.whl", hash = "sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239", size = 3401624 }, +] + +[[package]] +name = "sphinx" +version = "8.1.3" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", +] +dependencies = [ + { name = "alabaster", version = "1.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "babel", marker = "python_full_version >= '3.10'" }, + { name = "colorama", marker = "python_full_version >= '3.10' and sys_platform == 'win32'" }, + { name = "docutils", marker = "python_full_version >= '3.10'" }, + { name = "imagesize", marker = "python_full_version >= '3.10'" }, + { name = "jinja2", marker = "python_full_version >= '3.10'" }, + { name = "packaging", marker = "python_full_version >= '3.10'" }, + { name = "pygments", marker = "python_full_version >= '3.10'" }, + { name = "requests", marker = "python_full_version >= '3.10'" }, + { name = "snowballstemmer", marker = "python_full_version >= '3.10'" }, + { name = 
"sphinxcontrib-applehelp", marker = "python_full_version >= '3.10'" }, + { name = "sphinxcontrib-devhelp", marker = "python_full_version >= '3.10'" }, + { name = "sphinxcontrib-htmlhelp", marker = "python_full_version >= '3.10'" }, + { name = "sphinxcontrib-jsmath", marker = "python_full_version >= '3.10'" }, + { name = "sphinxcontrib-qthelp", marker = "python_full_version >= '3.10'" }, + { name = "sphinxcontrib-serializinghtml", marker = "python_full_version >= '3.10'" }, + { name = "tomli", marker = "python_full_version == '3.10.*'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/be0b61178fe2cdcb67e2a92fc9ebb488e3c51c4f74a36a7824c0adf23425/sphinx-8.1.3.tar.gz", hash = "sha256:43c1911eecb0d3e161ad78611bc905d1ad0e523e4ddc202a58a821773dc4c927", size = 8184611 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/26/60/1ddff83a56d33aaf6f10ec8ce84b4c007d9368b21008876fceda7e7381ef/sphinx-8.1.3-py3-none-any.whl", hash = "sha256:09719015511837b76bf6e03e42eb7595ac8c2e41eeb9c29c5b755c6b677992a2", size = 3487125 }, +] + +[[package]] +name = "sphinx-autobuild" +version = "2024.10.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama" }, + { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "starlette" }, + { name = "uvicorn" }, + { name = "watchfiles" }, + { name = "websockets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a5/2c/155e1de2c1ba96a72e5dba152c509a8b41e047ee5c2def9e9f0d812f8be7/sphinx_autobuild-2024.10.3.tar.gz", hash = "sha256:248150f8f333e825107b6d4b86113ab28fa51750e5f9ae63b59dc339be951fb1", size = 14023 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/c0/eba125db38c84d3c74717008fd3cb5000b68cd7e2cbafd1349c6a38c3d3b/sphinx_autobuild-2024.10.3-py3-none-any.whl", hash = "sha256:158e16c36f9d633e613c9aaf81c19b0fc458ca78b112533b20dafcda430d60fa", size = 11908 }, +] + +[[package]] +name = "sphinx-basic-ng" +version = "1.0.0b2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/98/0b/a866924ded68efec7a1759587a4e478aec7559d8165fac8b2ad1c0e774d6/sphinx_basic_ng-1.0.0b2.tar.gz", hash = "sha256:9ec55a47c90c8c002b5960c57492ec3021f5193cb26cebc2dc4ea226848651c9", size = 20736 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/dd/018ce05c532a22007ac58d4f45232514cd9d6dd0ee1dc374e309db830983/sphinx_basic_ng-1.0.0b2-py3-none-any.whl", hash = "sha256:eb09aedbabfb650607e9b4b68c9d240b90b1e1be221d6ad71d61c52e29f7932b", size = 22496 }, +] + +[[package]] +name = "sphinx-rtd-theme" +version = "3.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "docutils" }, + { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "sphinxcontrib-jquery" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/91/44/c97faec644d29a5ceddd3020ae2edffa69e7d00054a8c7a6021e82f20335/sphinx_rtd_theme-3.0.2.tar.gz", hash = "sha256:b7457bc25dda723b20b086a670b9953c859eab60a2a03ee8eb2bb23e176e5f85", size = 7620463 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/85/77/46e3bac77b82b4df5bb5b61f2de98637724f246b4966cfc34bc5895d852a/sphinx_rtd_theme-3.0.2-py2.py3-none-any.whl", hash = "sha256:422ccc750c3a3a311de4ae327e82affdaf59eb695ba4936538552f3b00f4ee13", size = 7655561 }, +] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/6e/b837e84a1a704953c62ef8776d45c3e8d759876b4a84fe14eba2859106fe/sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1", size = 20053 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5d/85/9ebeae2f76e9e77b952f4b274c27238156eae7979c5421fba91a28f4970d/sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5", size = 119300 }, +] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f6/d2/5beee64d3e4e747f316bae86b55943f51e82bb86ecd325883ef65741e7da/sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad", size = 12967 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/35/7a/987e583882f985fe4d7323774889ec58049171828b58c2217e7f79cdf44e/sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2", size = 82530 }, +] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/93/983afd9aa001e5201eab16b5a444ed5b9b0a7a010541e0ddfbbfd0b2470c/sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9", size = 22617 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0a/7b/18a8c0bcec9182c05a0b3ec2a776bba4ead82750a55ff798e8d406dae604/sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8", size = 98705 }, +] + +[[package]] +name = "sphinxcontrib-jquery" +version = "4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/de/f3/aa67467e051df70a6330fe7770894b3e4f09436dea6881ae0b4f3d87cad8/sphinxcontrib-jquery-4.1.tar.gz", hash = "sha256:1620739f04e36a2c779f1a131a2dfd49b2fd07351bf1968ced074365933abc7a", size = 122331 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/85/749bd22d1a68db7291c89e2ebca53f4306c3f205853cf31e9de279034c3c/sphinxcontrib_jquery-4.1-py2.py3-none-any.whl", hash = "sha256:f936030d7d0147dd026a4f2b5a57343d233f1fc7b363f68b3d4f1cb0993878ae", size = 121104 }, +] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist 
= { url = "https://files.pythonhosted.org/packages/b2/e8/9ed3830aeed71f17c026a07a5097edcf44b692850ef215b161b8ad875729/sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8", size = 5787 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/42/4c8646762ee83602e3fb3fbe774c2fac12f317deb0b5dbeeedd2d3ba4b77/sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", size = 5071 }, +] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/68/bc/9104308fc285eb3e0b31b67688235db556cd5b0ef31d96f30e45f2e51cae/sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab", size = 17165 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/27/83/859ecdd180cacc13b1f7e857abf8582a64552ea7a061057a6c716e790fce/sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb", size = 88743 }, +] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3b/44/6716b257b0aa6bfd51a1b31665d1c205fb12cb5ad56de752dfa15657de2f/sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d", size = 16080 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/52/a7/d2782e4e3f77c8450f727ba74a8f12756d5ba823d81b941f1b04da9d033a/sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331", size = 92072 }, +] + +[[package]] +name = "sphinxcontrib-shellcheck" +version = "1.1.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "decorator" }, + { name = "docutils" }, + { name = "six" }, + { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ef/2b/20717a5e0c7ee99dfd5fcdf11a8cf0ab02533cf62775f24d344ea5cf48c1/sphinxcontrib-shellcheck-1.1.2.zip", hash = "sha256:475a3ae12a1cfc1bc26cff57f0dd15561213818e3b470b3eacc4bb8be7b129c0", size = 338739 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/06/9c/1ff7fe5174f944fac0fcb53bdaac7b98d73a98dd2ca800d95af6af9edb9a/sphinxcontrib_shellcheck-1.1.2-py35-none-any.whl", hash = "sha256:c0449dc9402521ab1d05a1b9eb8c9099707da64824341686dab4f620dc688514", size = 11532 }, + { url = "https://files.pythonhosted.org/packages/9f/8c/833388d3127d8dc0d5558bf52225eb20ed024ac46ef8ef4bffe7298ceb3d/sphinxcontrib_shellcheck-1.1.2-py36-none-any.whl", hash = "sha256:bcd8ffd26e6430deff9ffd10705683b502ace3fc8b4d1ba84496b3752f65fe52", size = 11533 }, + { url = "https://files.pythonhosted.org/packages/9d/b5/cdc74763bcf0916f47d053830c00114f1de65d97ea2281b66bbf2a587b8a/sphinxcontrib_shellcheck-1.1.2-py37-none-any.whl", hash = "sha256:46d1aba8201bbfc7a2c51e08446cab36bdab318c997223c8fc40733a5eedc71f", size = 11533 }, + { url = 
"https://files.pythonhosted.org/packages/58/ba/cf15480bc238a15e10604ee7f0e3e20ea0bf9a55a4f0b4e50571e8d13e60/sphinxcontrib_shellcheck-1.1.2-py38-none-any.whl", hash = "sha256:4c5f2840418cd1d7d662c0b3f51a07625f1a8f92755b19347ce85e8258e9d847", size = 11532 }, +] + +[[package]] +name = "starlette" +version = "0.45.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "typing-extensions", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/90/4f/e1c9f4ec3dae67a94c9285ed275355d5f7cf0f3a5c34538c8ae5412af550/starlette-0.45.2.tar.gz", hash = "sha256:bba1831d15ae5212b22feab2f218bab6ed3cd0fc2dc1d4442443bb1ee52260e0", size = 2574026 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/aa/ab/fe4f57c83620b39dfc9e7687ebad59129ff05170b99422105019d9a65eec/starlette-0.45.2-py3-none-any.whl", hash = "sha256:4daec3356fb0cb1e723a5235e5beaf375d2259af27532958e2d79df549dad9da", size = 71505 }, +] + +[[package]] +name = "tomli" +version = "2.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077 }, + { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429 }, + { url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067 }, + { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030 }, + { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898 }, + { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894 }, + { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319 }, + { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273 }, + { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310 }, + { url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309 }, + { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762 }, + { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453 }, + { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486 }, + { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349 }, + { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159 }, + { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243 }, + { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645 }, + { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584 }, + { url = "https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875 }, + { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418 }, + { url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708 }, + { url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582 }, + { url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543 }, + { url = "https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691 }, + { url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170 }, + { url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530 }, + { url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666 }, + { url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954 }, + { url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724 }, + { url = "https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383 }, + { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257 }, +] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438 }, +] + +[[package]] +name = "urllib3" +version = "1.26.20" +source = { registry = 
"https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] +sdist = { url = "https://files.pythonhosted.org/packages/e4/e8/6ff5e6bc22095cfc59b6ea711b687e2b7ed4bdb373f7eeec370a97d7392f/urllib3-1.26.20.tar.gz", hash = "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32", size = 307380 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/33/cf/8435d5a7159e2a9c83a95896ed596f68cf798005fe107cc655b5c5c14704/urllib3-1.26.20-py2.py3-none-any.whl", hash = "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e", size = 144225 }, +] + +[[package]] +name = "urllib3" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.10'", +] +sdist = { url = "https://files.pythonhosted.org/packages/aa/63/e53da845320b757bf29ef6a9062f5c669fe997973f966045cb019c3f4b66/urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d", size = 307268 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/19/4ec628951a74043532ca2cf5d97b7b14863931476d117c471e8e2b1eb39f/urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df", size = 128369 }, +] + +[[package]] +name = "uvicorn" +version = "0.34.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "h11" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4b/4d/938bd85e5bf2edeec766267a5015ad969730bb91e31b44021dfe8b22df6c/uvicorn-0.34.0.tar.gz", hash = "sha256:404051050cd7e905de2c9a7e61790943440b3416f49cb409f965d9dcd0fa73e9", size = 76568 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/61/14/33a3a1352cfa71812a3a21e8c9bfb83f60b0011f5e36f2b1399d51928209/uvicorn-0.34.0-py3-none-any.whl", hash = "sha256:023dc038422502fa28a09c7a30bf2b6991512da7dcdb8fd35fe57cfc154126f4", size = 62315 }, +] + +[[package]] +name = "virtualenv" +version = "20.29.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "distlib" }, + { name = "filelock" }, + { name = "platformdirs" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a7/ca/f23dcb02e161a9bba141b1c08aa50e8da6ea25e6d780528f1d385a3efe25/virtualenv-20.29.1.tar.gz", hash = "sha256:b8b8970138d32fb606192cb97f6cd4bb644fa486be9308fb9b63f81091b5dc35", size = 7658028 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/89/9b/599bcfc7064fbe5740919e78c5df18e5dceb0887e676256a1061bb5ae232/virtualenv-20.29.1-py3-none-any.whl", hash = "sha256:4e4cb403c0b0da39e13b46b1b2476e505cb0046b25f242bee80f62bf990b2779", size = 4282379 }, +] + +[[package]] +name = "watchfiles" +version = "1.0.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f5/26/c705fc77d0a9ecdb9b66f1e2976d95b81df3cae518967431e7dbf9b5e219/watchfiles-1.0.4.tar.gz", hash = "sha256:6ba473efd11062d73e4f00c2b730255f9c1bdd73cd5f9fe5b5da8dbd4a717205", size = 94625 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/14/02/22fcaed0396730b0d362bc8d1ffb3be2658fd473eecbb2ba84243e157f11/watchfiles-1.0.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ba5bb3073d9db37c64520681dd2650f8bd40902d991e7b4cfaeece3e32561d08", size = 395212 }, + { url = 
"https://files.pythonhosted.org/packages/e9/3d/ec5a2369a46edf3ebe092c39d9ae48e8cb6dacbde51c4b4f98936c524269/watchfiles-1.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9f25d0ba0fe2b6d2c921cf587b2bf4c451860086534f40c384329fb96e2044d1", size = 384815 }, + { url = "https://files.pythonhosted.org/packages/df/b4/898991cececbe171e67142c31905510203649569d9817848f47c4177ee42/watchfiles-1.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47eb32ef8c729dbc4f4273baece89398a4d4b5d21a1493efea77a17059f4df8a", size = 450680 }, + { url = "https://files.pythonhosted.org/packages/58/f7/d4aa3000e812cfb5e5c2c6c0a3ec9d0a46a42489a8727edd160631c4e210/watchfiles-1.0.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:076f293100db3b0b634514aa0d294b941daa85fc777f9c698adb1009e5aca0b1", size = 455923 }, + { url = "https://files.pythonhosted.org/packages/dd/95/7e2e4c6aba1b02fb5c76d2f6a450b85215921ec5f8f7ad5efd075369563f/watchfiles-1.0.4-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1eacd91daeb5158c598fe22d7ce66d60878b6294a86477a4715154990394c9b3", size = 482339 }, + { url = "https://files.pythonhosted.org/packages/bb/67/4265b0fabcc2ef2c9e3e8802ba7908cf718a357ebfb49c72e53787156a48/watchfiles-1.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:13c2ce7b72026cfbca120d652f02c7750f33b4c9395d79c9790b27f014c8a5a2", size = 519908 }, + { url = "https://files.pythonhosted.org/packages/0d/96/b57802d5f8164bdf070befb4fd3dec4edba5a364ec0670965a97eb8098ce/watchfiles-1.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:90192cdc15ab7254caa7765a98132a5a41471cf739513cc9bcf7d2ffcc0ec7b2", size = 501410 }, + { url = "https://files.pythonhosted.org/packages/8b/18/6db0de4e8911ba14e31853201b40c0fa9fea5ecf3feb86b0ad58f006dfc3/watchfiles-1.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:278aaa395f405972e9f523bd786ed59dfb61e4b827856be46a42130605fd0899", size = 452876 }, + { url = "https://files.pythonhosted.org/packages/df/df/092a961815edf723a38ba2638c49491365943919c3526cc9cf82c42786a6/watchfiles-1.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a462490e75e466edbb9fc4cd679b62187153b3ba804868452ef0577ec958f5ff", size = 615353 }, + { url = "https://files.pythonhosted.org/packages/f3/cf/b85fe645de4ff82f3f436c5e9032379fce37c303f6396a18f9726cc34519/watchfiles-1.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8d0d0630930f5cd5af929040e0778cf676a46775753e442a3f60511f2409f48f", size = 613187 }, + { url = "https://files.pythonhosted.org/packages/f6/d4/a9fea27aef4dd69689bc3556718c1157a7accb72aa035ece87c1fa8483b5/watchfiles-1.0.4-cp310-cp310-win32.whl", hash = "sha256:cc27a65069bcabac4552f34fd2dce923ce3fcde0721a16e4fb1b466d63ec831f", size = 270799 }, + { url = "https://files.pythonhosted.org/packages/df/02/dbe9d4439f15dd4ad0720b6e039bde9d66d1f830331f34c18eb70fa6608e/watchfiles-1.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:8b1f135238e75d075359cf506b27bf3f4ca12029c47d3e769d8593a2024ce161", size = 284145 }, + { url = "https://files.pythonhosted.org/packages/0f/bb/8461adc4b1fed009546fb797fc0d5698dcfe5e289cb37e1b8f16a93cdc30/watchfiles-1.0.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:2a9f93f8439639dc244c4d2902abe35b0279102bca7bbcf119af964f51d53c19", size = 394869 }, + { url = "https://files.pythonhosted.org/packages/55/88/9ebf36b3547176d1709c320de78c1fa3263a46be31b5b1267571d9102686/watchfiles-1.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:9eea33ad8c418847dd296e61eb683cae1c63329b6d854aefcd412e12d94ee235", size = 384905 }, + { url = "https://files.pythonhosted.org/packages/03/8a/04335ce23ef78d8c69f0913e8b20cf7d9233e3986543aeef95ef2d6e43d2/watchfiles-1.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31f1a379c9dcbb3f09cf6be1b7e83b67c0e9faabed0471556d9438a4a4e14202", size = 449944 }, + { url = "https://files.pythonhosted.org/packages/17/4e/c8d5dcd14fe637f4633616dabea8a4af0a10142dccf3b43e0f081ba81ab4/watchfiles-1.0.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ab594e75644421ae0a2484554832ca5895f8cab5ab62de30a1a57db460ce06c6", size = 456020 }, + { url = "https://files.pythonhosted.org/packages/5e/74/3e91e09e1861dd7fbb1190ce7bd786700dc0fbc2ccd33bb9fff5de039229/watchfiles-1.0.4-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc2eb5d14a8e0d5df7b36288979176fbb39672d45184fc4b1c004d7c3ce29317", size = 482983 }, + { url = "https://files.pythonhosted.org/packages/a1/3d/e64de2d1ce4eb6a574fd78ce3a28c279da263be9ef3cfcab6f708df192f2/watchfiles-1.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f68d8e9d5a321163ddacebe97091000955a1b74cd43724e346056030b0bacee", size = 520320 }, + { url = "https://files.pythonhosted.org/packages/2c/bd/52235f7063b57240c66a991696ed27e2a18bd6fcec8a1ea5a040b70d0611/watchfiles-1.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9ce064e81fe79faa925ff03b9f4c1a98b0bbb4a1b8c1b015afa93030cb21a49", size = 500988 }, + { url = "https://files.pythonhosted.org/packages/3a/b0/ff04194141a5fe650c150400dd9e42667916bc0f52426e2e174d779b8a74/watchfiles-1.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b77d5622ac5cc91d21ae9c2b284b5d5c51085a0bdb7b518dba263d0af006132c", size = 452573 }, + { url = "https://files.pythonhosted.org/packages/3d/9d/966164332c5a178444ae6d165082d4f351bd56afd9c3ec828eecbf190e6a/watchfiles-1.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1941b4e39de9b38b868a69b911df5e89dc43767feeda667b40ae032522b9b5f1", size = 615114 }, + { url = "https://files.pythonhosted.org/packages/94/df/f569ae4c1877f96ad4086c153a8eee5a19a3b519487bf5c9454a3438c341/watchfiles-1.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4f8c4998506241dedf59613082d1c18b836e26ef2a4caecad0ec41e2a15e4226", size = 613076 }, + { url = "https://files.pythonhosted.org/packages/15/ae/8ce5f29e65d5fa5790e3c80c289819c55e12be2e1b9f5b6a0e55e169b97d/watchfiles-1.0.4-cp311-cp311-win32.whl", hash = "sha256:4ebbeca9360c830766b9f0df3640b791be569d988f4be6c06d6fae41f187f105", size = 271013 }, + { url = "https://files.pythonhosted.org/packages/a4/c6/79dc4a7c598a978e5fafa135090aaf7bbb03b8dec7bada437dfbe578e7ed/watchfiles-1.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:05d341c71f3d7098920f8551d4df47f7b57ac5b8dad56558064c3431bdfc0b74", size = 284229 }, + { url = "https://files.pythonhosted.org/packages/37/3d/928633723211753f3500bfb138434f080363b87a1b08ca188b1ce54d1e05/watchfiles-1.0.4-cp311-cp311-win_arm64.whl", hash = "sha256:32b026a6ab64245b584acf4931fe21842374da82372d5c039cba6bf99ef722f3", size = 276824 }, + { url = "https://files.pythonhosted.org/packages/5b/1a/8f4d9a1461709756ace48c98f07772bc6d4519b1e48b5fa24a4061216256/watchfiles-1.0.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:229e6ec880eca20e0ba2f7e2249c85bae1999d330161f45c78d160832e026ee2", size = 391345 }, + { url = 
"https://files.pythonhosted.org/packages/bc/d2/6750b7b3527b1cdaa33731438432e7238a6c6c40a9924049e4cebfa40805/watchfiles-1.0.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5717021b199e8353782dce03bd8a8f64438832b84e2885c4a645f9723bf656d9", size = 381515 }, + { url = "https://files.pythonhosted.org/packages/4e/17/80500e42363deef1e4b4818729ed939aaddc56f82f4e72b2508729dd3c6b/watchfiles-1.0.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0799ae68dfa95136dde7c472525700bd48777875a4abb2ee454e3ab18e9fc712", size = 449767 }, + { url = "https://files.pythonhosted.org/packages/10/37/1427fa4cfa09adbe04b1e97bced19a29a3462cc64c78630787b613a23f18/watchfiles-1.0.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:43b168bba889886b62edb0397cab5b6490ffb656ee2fcb22dec8bfeb371a9e12", size = 455677 }, + { url = "https://files.pythonhosted.org/packages/c5/7a/39e9397f3a19cb549a7d380412fd9e507d4854eddc0700bfad10ef6d4dba/watchfiles-1.0.4-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb2c46e275fbb9f0c92e7654b231543c7bbfa1df07cdc4b99fa73bedfde5c844", size = 482219 }, + { url = "https://files.pythonhosted.org/packages/45/2d/7113931a77e2ea4436cad0c1690c09a40a7f31d366f79c6f0a5bc7a4f6d5/watchfiles-1.0.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:857f5fc3aa027ff5e57047da93f96e908a35fe602d24f5e5d8ce64bf1f2fc733", size = 518830 }, + { url = "https://files.pythonhosted.org/packages/f9/1b/50733b1980fa81ef3c70388a546481ae5fa4c2080040100cd7bf3bf7b321/watchfiles-1.0.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55ccfd27c497b228581e2838d4386301227fc0cb47f5a12923ec2fe4f97b95af", size = 497997 }, + { url = "https://files.pythonhosted.org/packages/2b/b4/9396cc61b948ef18943e7c85ecfa64cf940c88977d882da57147f62b34b1/watchfiles-1.0.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c11ea22304d17d4385067588123658e9f23159225a27b983f343fcffc3e796a", size = 452249 }, + { url = "https://files.pythonhosted.org/packages/fb/69/0c65a5a29e057ad0dc691c2fa6c23b2983c7dabaa190ba553b29ac84c3cc/watchfiles-1.0.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:74cb3ca19a740be4caa18f238298b9d472c850f7b2ed89f396c00a4c97e2d9ff", size = 614412 }, + { url = "https://files.pythonhosted.org/packages/7f/b9/319fcba6eba5fad34327d7ce16a6b163b39741016b1996f4a3c96b8dd0e1/watchfiles-1.0.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c7cce76c138a91e720d1df54014a047e680b652336e1b73b8e3ff3158e05061e", size = 611982 }, + { url = "https://files.pythonhosted.org/packages/f1/47/143c92418e30cb9348a4387bfa149c8e0e404a7c5b0585d46d2f7031b4b9/watchfiles-1.0.4-cp312-cp312-win32.whl", hash = "sha256:b045c800d55bc7e2cadd47f45a97c7b29f70f08a7c2fa13241905010a5493f94", size = 271822 }, + { url = "https://files.pythonhosted.org/packages/ea/94/b0165481bff99a64b29e46e07ac2e0df9f7a957ef13bec4ceab8515f44e3/watchfiles-1.0.4-cp312-cp312-win_amd64.whl", hash = "sha256:c2acfa49dd0ad0bf2a9c0bb9a985af02e89345a7189be1efc6baa085e0f72d7c", size = 285441 }, + { url = "https://files.pythonhosted.org/packages/11/de/09fe56317d582742d7ca8c2ca7b52a85927ebb50678d9b0fa8194658f536/watchfiles-1.0.4-cp312-cp312-win_arm64.whl", hash = "sha256:22bb55a7c9e564e763ea06c7acea24fc5d2ee5dfc5dafc5cfbedfe58505e9f90", size = 277141 }, + { url = "https://files.pythonhosted.org/packages/08/98/f03efabec64b5b1fa58c0daab25c68ef815b0f320e54adcacd0d6847c339/watchfiles-1.0.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = 
"sha256:8012bd820c380c3d3db8435e8cf7592260257b378b649154a7948a663b5f84e9", size = 390954 }, + { url = "https://files.pythonhosted.org/packages/16/09/4dd49ba0a32a45813debe5fb3897955541351ee8142f586303b271a02b40/watchfiles-1.0.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:aa216f87594f951c17511efe5912808dfcc4befa464ab17c98d387830ce07b60", size = 381133 }, + { url = "https://files.pythonhosted.org/packages/76/59/5aa6fc93553cd8d8ee75c6247763d77c02631aed21551a97d94998bf1dae/watchfiles-1.0.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62c9953cf85529c05b24705639ffa390f78c26449e15ec34d5339e8108c7c407", size = 449516 }, + { url = "https://files.pythonhosted.org/packages/4c/aa/df4b6fe14b6317290b91335b23c96b488d365d65549587434817e06895ea/watchfiles-1.0.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7cf684aa9bba4cd95ecb62c822a56de54e3ae0598c1a7f2065d51e24637a3c5d", size = 454820 }, + { url = "https://files.pythonhosted.org/packages/5e/71/185f8672f1094ce48af33252c73e39b48be93b761273872d9312087245f6/watchfiles-1.0.4-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f44a39aee3cbb9b825285ff979ab887a25c5d336e5ec3574f1506a4671556a8d", size = 481550 }, + { url = "https://files.pythonhosted.org/packages/85/d7/50ebba2c426ef1a5cb17f02158222911a2e005d401caf5d911bfca58f4c4/watchfiles-1.0.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38320582736922be8c865d46520c043bff350956dfc9fbaee3b2df4e1740a4b", size = 518647 }, + { url = "https://files.pythonhosted.org/packages/f0/7a/4c009342e393c545d68987e8010b937f72f47937731225b2b29b7231428f/watchfiles-1.0.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:39f4914548b818540ef21fd22447a63e7be6e24b43a70f7642d21f1e73371590", size = 497547 }, + { url = "https://files.pythonhosted.org/packages/0f/7c/1cf50b35412d5c72d63b2bf9a4fffee2e1549a245924960dd087eb6a6de4/watchfiles-1.0.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f12969a3765909cf5dc1e50b2436eb2c0e676a3c75773ab8cc3aa6175c16e902", size = 452179 }, + { url = "https://files.pythonhosted.org/packages/d6/a9/3db1410e1c1413735a9a472380e4f431ad9a9e81711cda2aaf02b7f62693/watchfiles-1.0.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:0986902677a1a5e6212d0c49b319aad9cc48da4bd967f86a11bde96ad9676ca1", size = 614125 }, + { url = "https://files.pythonhosted.org/packages/f2/e1/0025d365cf6248c4d1ee4c3d2e3d373bdd3f6aff78ba4298f97b4fad2740/watchfiles-1.0.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:308ac265c56f936636e3b0e3f59e059a40003c655228c131e1ad439957592303", size = 611911 }, + { url = "https://files.pythonhosted.org/packages/55/55/035838277d8c98fc8c917ac9beeb0cd6c59d675dc2421df5f9fcf44a0070/watchfiles-1.0.4-cp313-cp313-win32.whl", hash = "sha256:aee397456a29b492c20fda2d8961e1ffb266223625346ace14e4b6d861ba9c80", size = 271152 }, + { url = "https://files.pythonhosted.org/packages/f0/e5/96b8e55271685ddbadc50ce8bc53aa2dff278fb7ac4c2e473df890def2dc/watchfiles-1.0.4-cp313-cp313-win_amd64.whl", hash = "sha256:d6097538b0ae5c1b88c3b55afa245a66793a8fec7ada6755322e465fb1a0e8cc", size = 285216 }, + { url = "https://files.pythonhosted.org/packages/15/81/54484fc2fa715abe79694b975692af963f0878fb9d72b8251aa542bf3f10/watchfiles-1.0.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:d3452c1ec703aa1c61e15dfe9d482543e4145e7c45a6b8566978fbb044265a21", size = 394967 }, + { url = 
"https://files.pythonhosted.org/packages/14/b3/557f0cd90add86586fe3deeebd11e8299db6bc3452b44a534f844c6ab831/watchfiles-1.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7b75fee5a16826cf5c46fe1c63116e4a156924d668c38b013e6276f2582230f0", size = 384707 }, + { url = "https://files.pythonhosted.org/packages/03/a3/34638e1bffcb85a405e7b005e30bb211fd9be2ab2cb1847f2ceb81bef27b/watchfiles-1.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e997802d78cdb02623b5941830ab06f8860038faf344f0d288d325cc9c5d2ff", size = 450442 }, + { url = "https://files.pythonhosted.org/packages/8f/9f/6a97460dd11a606003d634c7158d9fea8517e98daffc6f56d0f5fde2e86a/watchfiles-1.0.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e0611d244ce94d83f5b9aff441ad196c6e21b55f77f3c47608dcf651efe54c4a", size = 455959 }, + { url = "https://files.pythonhosted.org/packages/9d/bb/e0648c6364e4d37ec692bc3f0c77507d17d8bb8f75689148819142010bbf/watchfiles-1.0.4-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9745a4210b59e218ce64c91deb599ae8775c8a9da4e95fb2ee6fe745fc87d01a", size = 483187 }, + { url = "https://files.pythonhosted.org/packages/dd/ad/d9290586a25288a81dfa8ad6329cf1de32aa1a9798ace45259eb95dcfb37/watchfiles-1.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4810ea2ae622add560f4aa50c92fef975e475f7ac4900ce5ff5547b2434642d8", size = 519733 }, + { url = "https://files.pythonhosted.org/packages/4e/a9/150c1666825cc9637093f8cae7fc6f53b3296311ab8bd65f1389acb717cb/watchfiles-1.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:740d103cd01458f22462dedeb5a3382b7f2c57d07ff033fbc9465919e5e1d0f3", size = 502275 }, + { url = "https://files.pythonhosted.org/packages/44/dc/5bfd21e20a330aca1706ac44713bc322838061938edf4b53130f97a7b211/watchfiles-1.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdbd912a61543a36aef85e34f212e5d2486e7c53ebfdb70d1e0b060cc50dd0bf", size = 452907 }, + { url = "https://files.pythonhosted.org/packages/50/fe/8f4fc488f1699f564687b697456eb5c0cb8e2b0b8538150511c234c62094/watchfiles-1.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0bc80d91ddaf95f70258cf78c471246846c1986bcc5fd33ccc4a1a67fcb40f9a", size = 615927 }, + { url = "https://files.pythonhosted.org/packages/ad/19/2e45f6f6eec89dd97a4d281635e3d73c17e5f692e7432063bdfdf9562c89/watchfiles-1.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab0311bb2ffcd9f74b6c9de2dda1612c13c84b996d032cd74799adb656af4e8b", size = 613435 }, + { url = "https://files.pythonhosted.org/packages/91/17/dc5ac62ca377827c24321d68050efc2eaee2ebaf3f21d055bbce2206d309/watchfiles-1.0.4-cp39-cp39-win32.whl", hash = "sha256:02a526ee5b5a09e8168314c905fc545c9bc46509896ed282aeb5a8ba9bd6ca27", size = 270810 }, + { url = "https://files.pythonhosted.org/packages/82/2b/dad851342492d538e7ffe72a8c756f747dd147988abb039ac9d6577d2235/watchfiles-1.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:a5ae5706058b27c74bac987d615105da17724172d5aaacc6c362a40599b6de43", size = 284866 }, + { url = "https://files.pythonhosted.org/packages/6f/06/175d5ac6b838fb319008c0cd981d7bf289317c510154d411d3584ca2b67b/watchfiles-1.0.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cdcc92daeae268de1acf5b7befcd6cfffd9a047098199056c72e4623f531de18", size = 396269 }, + { url = "https://files.pythonhosted.org/packages/86/ee/5db93b0b57dc0587abdbac4149296ee73275f615d790a82cb5598af0557f/watchfiles-1.0.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:d8d3d9203705b5797f0af7e7e5baa17c8588030aaadb7f6a86107b7247303817", size = 386010 }, + { url = "https://files.pythonhosted.org/packages/75/61/fe0dc5fedf152bfc085a53711f740701f6bdb8ab6b5c950402b681d4858b/watchfiles-1.0.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdef5a1be32d0b07dcea3318a0be95d42c98ece24177820226b56276e06b63b0", size = 450913 }, + { url = "https://files.pythonhosted.org/packages/9f/dd/3c7731af3baf1a9957afc643d176f94480921a690ec3237c9f9d11301c08/watchfiles-1.0.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:342622287b5604ddf0ed2d085f3a589099c9ae8b7331df3ae9845571586c4f3d", size = 453474 }, + { url = "https://files.pythonhosted.org/packages/6b/b4/c3998f54c91a35cee60ee6d3a855a069c5dff2bae6865147a46e9090dccd/watchfiles-1.0.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9fe37a2de80aa785d340f2980276b17ef697ab8db6019b07ee4fd28a8359d2f3", size = 395565 }, + { url = "https://files.pythonhosted.org/packages/3f/05/ac1a4d235beb9ddfb8ac26ce93a00ba6bd1b1b43051ef12d7da957b4a9d1/watchfiles-1.0.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:9d1ef56b56ed7e8f312c934436dea93bfa3e7368adfcf3df4c0da6d4de959a1e", size = 385406 }, + { url = "https://files.pythonhosted.org/packages/4c/ea/36532e7d86525f4e52a10efed182abf33efb106a93d49f5fbc994b256bcd/watchfiles-1.0.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95b42cac65beae3a362629950c444077d1b44f1790ea2772beaea95451c086bb", size = 450424 }, + { url = "https://files.pythonhosted.org/packages/7a/e9/3cbcf4d70cd0b6d3f30631deae1bf37cc0be39887ca327a44462fe546bf5/watchfiles-1.0.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e0227b8ed9074c6172cf55d85b5670199c99ab11fd27d2c473aa30aec67ee42", size = 452488 }, +] + +[[package]] +name = "websockets" +version = "14.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f4/1b/380b883ce05bb5f45a905b61790319a28958a9ab1e4b6b95ff5464b60ca1/websockets-14.1.tar.gz", hash = "sha256:398b10c77d471c0aab20a845e7a60076b6390bfdaac7a6d2edb0d2c59d75e8d8", size = 162840 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/af/91/b1b375dbd856fd5fff3f117de0e520542343ecaf4e8fc60f1ac1e9f5822c/websockets-14.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a0adf84bc2e7c86e8a202537b4fd50e6f7f0e4a6b6bf64d7ccb96c4cd3330b29", size = 161950 }, + { url = "https://files.pythonhosted.org/packages/61/8f/4d52f272d3ebcd35e1325c646e98936099a348374d4a6b83b524bded8116/websockets-14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90b5d9dfbb6d07a84ed3e696012610b6da074d97453bd01e0e30744b472c8179", size = 159601 }, + { url = "https://files.pythonhosted.org/packages/c4/b1/29e87b53eb1937992cdee094a0988aadc94f25cf0b37e90c75eed7123d75/websockets-14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2177ee3901075167f01c5e335a6685e71b162a54a89a56001f1c3e9e3d2ad250", size = 159854 }, + { url = "https://files.pythonhosted.org/packages/3f/e6/752a2f5e8321ae2a613062676c08ff2fccfb37dc837a2ee919178a372e8a/websockets-14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f14a96a0034a27f9d47fd9788913924c89612225878f8078bb9d55f859272b0", size = 168835 }, + { url = "https://files.pythonhosted.org/packages/60/27/ca62de7877596926321b99071639275e94bb2401397130b7cf33dbf2106a/websockets-14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", 
hash = "sha256:1f874ba705deea77bcf64a9da42c1f5fc2466d8f14daf410bc7d4ceae0a9fcb0", size = 167844 }, + { url = "https://files.pythonhosted.org/packages/7e/db/f556a1d06635c680ef376be626c632e3f2bbdb1a0189d1d1bffb061c3b70/websockets-14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9607b9a442392e690a57909c362811184ea429585a71061cd5d3c2b98065c199", size = 168157 }, + { url = "https://files.pythonhosted.org/packages/b3/bc/99e5f511838c365ac6ecae19674eb5e94201aa4235bd1af3e6fa92c12905/websockets-14.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:bea45f19b7ca000380fbd4e02552be86343080120d074b87f25593ce1700ad58", size = 168561 }, + { url = "https://files.pythonhosted.org/packages/c6/e7/251491585bad61c79e525ac60927d96e4e17b18447cc9c3cfab47b2eb1b8/websockets-14.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:219c8187b3ceeadbf2afcf0f25a4918d02da7b944d703b97d12fb01510869078", size = 167979 }, + { url = "https://files.pythonhosted.org/packages/ac/98/7ac2e4eeada19bdbc7a3a66a58e3ebdf33648b9e1c5b3f08c3224df168cf/websockets-14.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ad2ab2547761d79926effe63de21479dfaf29834c50f98c4bf5b5480b5838434", size = 167925 }, + { url = "https://files.pythonhosted.org/packages/ab/3d/09e65c47ee2396b7482968068f6e9b516221e1032b12dcf843b9412a5dfb/websockets-14.1-cp310-cp310-win32.whl", hash = "sha256:1288369a6a84e81b90da5dbed48610cd7e5d60af62df9851ed1d1d23a9069f10", size = 162831 }, + { url = "https://files.pythonhosted.org/packages/8a/67/59828a3d09740e6a485acccfbb66600632f2178b6ed1b61388ee96f17d5a/websockets-14.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0744623852f1497d825a49a99bfbec9bea4f3f946df6eb9d8a2f0c37a2fec2e", size = 163266 }, + { url = "https://files.pythonhosted.org/packages/97/ed/c0d03cb607b7fe1f7ff45e2cd4bb5cd0f9e3299ced79c2c303a6fff44524/websockets-14.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:449d77d636f8d9c17952628cc7e3b8faf6e92a17ec581ec0c0256300717e1512", size = 161949 }, + { url = "https://files.pythonhosted.org/packages/06/91/bf0a44e238660d37a2dda1b4896235d20c29a2d0450f3a46cd688f43b239/websockets-14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a35f704be14768cea9790d921c2c1cc4fc52700410b1c10948511039be824aac", size = 159606 }, + { url = "https://files.pythonhosted.org/packages/ff/b8/7185212adad274c2b42b6a24e1ee6b916b7809ed611cbebc33b227e5c215/websockets-14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b1f3628a0510bd58968c0f60447e7a692933589b791a6b572fcef374053ca280", size = 159854 }, + { url = "https://files.pythonhosted.org/packages/5a/8a/0849968d83474be89c183d8ae8dcb7f7ada1a3c24f4d2a0d7333c231a2c3/websockets-14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c3deac3748ec73ef24fc7be0b68220d14d47d6647d2f85b2771cb35ea847aa1", size = 169402 }, + { url = "https://files.pythonhosted.org/packages/bd/4f/ef886e37245ff6b4a736a09b8468dae05d5d5c99de1357f840d54c6f297d/websockets-14.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7048eb4415d46368ef29d32133134c513f507fff7d953c18c91104738a68c3b3", size = 168406 }, + { url = "https://files.pythonhosted.org/packages/11/43/e2dbd4401a63e409cebddedc1b63b9834de42f51b3c84db885469e9bdcef/websockets-14.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6cf0ad281c979306a6a34242b371e90e891bce504509fb6bb5246bbbf31e7b6", size = 168776 }, + { url = 
"https://files.pythonhosted.org/packages/6d/d6/7063e3f5c1b612e9f70faae20ebaeb2e684ffa36cb959eb0862ee2809b32/websockets-14.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cc1fc87428c1d18b643479caa7b15db7d544652e5bf610513d4a3478dbe823d0", size = 169083 }, + { url = "https://files.pythonhosted.org/packages/49/69/e6f3d953f2fa0f8a723cf18cd011d52733bd7f6e045122b24e0e7f49f9b0/websockets-14.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f95ba34d71e2fa0c5d225bde3b3bdb152e957150100e75c86bc7f3964c450d89", size = 168529 }, + { url = "https://files.pythonhosted.org/packages/70/ff/f31fa14561fc1d7b8663b0ed719996cf1f581abee32c8fb2f295a472f268/websockets-14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9481a6de29105d73cf4515f2bef8eb71e17ac184c19d0b9918a3701c6c9c4f23", size = 168475 }, + { url = "https://files.pythonhosted.org/packages/f1/15/b72be0e4bf32ff373aa5baef46a4c7521b8ea93ad8b49ca8c6e8e764c083/websockets-14.1-cp311-cp311-win32.whl", hash = "sha256:368a05465f49c5949e27afd6fbe0a77ce53082185bbb2ac096a3a8afaf4de52e", size = 162833 }, + { url = "https://files.pythonhosted.org/packages/bc/ef/2d81679acbe7057ffe2308d422f744497b52009ea8bab34b6d74a2657d1d/websockets-14.1-cp311-cp311-win_amd64.whl", hash = "sha256:6d24fc337fc055c9e83414c94e1ee0dee902a486d19d2a7f0929e49d7d604b09", size = 163263 }, + { url = "https://files.pythonhosted.org/packages/55/64/55698544ce29e877c9188f1aee9093712411a8fc9732cca14985e49a8e9c/websockets-14.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ed907449fe5e021933e46a3e65d651f641975a768d0649fee59f10c2985529ed", size = 161957 }, + { url = "https://files.pythonhosted.org/packages/a2/b1/b088f67c2b365f2c86c7b48edb8848ac27e508caf910a9d9d831b2f343cb/websockets-14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:87e31011b5c14a33b29f17eb48932e63e1dcd3fa31d72209848652310d3d1f0d", size = 159620 }, + { url = "https://files.pythonhosted.org/packages/c1/89/2a09db1bbb40ba967a1b8225b07b7df89fea44f06de9365f17f684d0f7e6/websockets-14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bc6ccf7d54c02ae47a48ddf9414c54d48af9c01076a2e1023e3b486b6e72c707", size = 159852 }, + { url = "https://files.pythonhosted.org/packages/ca/c1/f983138cd56e7d3079f1966e81f77ce6643f230cd309f73aa156bb181749/websockets-14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9777564c0a72a1d457f0848977a1cbe15cfa75fa2f67ce267441e465717dcf1a", size = 169675 }, + { url = "https://files.pythonhosted.org/packages/c1/c8/84191455d8660e2a0bdb33878d4ee5dfa4a2cedbcdc88bbd097303b65bfa/websockets-14.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a655bde548ca98f55b43711b0ceefd2a88a71af6350b0c168aa77562104f3f45", size = 168619 }, + { url = "https://files.pythonhosted.org/packages/8d/a7/62e551fdcd7d44ea74a006dc193aba370505278ad76efd938664531ce9d6/websockets-14.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3dfff83ca578cada2d19e665e9c8368e1598d4e787422a460ec70e531dbdd58", size = 169042 }, + { url = "https://files.pythonhosted.org/packages/ad/ed/1532786f55922c1e9c4d329608e36a15fdab186def3ca9eb10d7465bc1cc/websockets-14.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6a6c9bcf7cdc0fd41cc7b7944447982e8acfd9f0d560ea6d6845428ed0562058", size = 169345 }, + { url = "https://files.pythonhosted.org/packages/ea/fb/160f66960d495df3de63d9bcff78e1b42545b2a123cc611950ffe6468016/websockets-14.1-cp312-cp312-musllinux_1_2_i686.whl", hash = 
"sha256:4b6caec8576e760f2c7dd878ba817653144d5f369200b6ddf9771d64385b84d4", size = 168725 }, + { url = "https://files.pythonhosted.org/packages/cf/53/1bf0c06618b5ac35f1d7906444b9958f8485682ab0ea40dee7b17a32da1e/websockets-14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:eb6d38971c800ff02e4a6afd791bbe3b923a9a57ca9aeab7314c21c84bf9ff05", size = 168712 }, + { url = "https://files.pythonhosted.org/packages/e5/22/5ec2f39fff75f44aa626f86fa7f20594524a447d9c3be94d8482cd5572ef/websockets-14.1-cp312-cp312-win32.whl", hash = "sha256:1d045cbe1358d76b24d5e20e7b1878efe578d9897a25c24e6006eef788c0fdf0", size = 162838 }, + { url = "https://files.pythonhosted.org/packages/74/27/28f07df09f2983178db7bf6c9cccc847205d2b92ced986cd79565d68af4f/websockets-14.1-cp312-cp312-win_amd64.whl", hash = "sha256:90f4c7a069c733d95c308380aae314f2cb45bd8a904fb03eb36d1a4983a4993f", size = 163277 }, + { url = "https://files.pythonhosted.org/packages/34/77/812b3ba5110ed8726eddf9257ab55ce9e85d97d4aa016805fdbecc5e5d48/websockets-14.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:3630b670d5057cd9e08b9c4dab6493670e8e762a24c2c94ef312783870736ab9", size = 161966 }, + { url = "https://files.pythonhosted.org/packages/8d/24/4fcb7aa6986ae7d9f6d083d9d53d580af1483c5ec24bdec0978307a0f6ac/websockets-14.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:36ebd71db3b89e1f7b1a5deaa341a654852c3518ea7a8ddfdf69cc66acc2db1b", size = 159625 }, + { url = "https://files.pythonhosted.org/packages/f8/47/2a0a3a2fc4965ff5b9ce9324d63220156bd8bedf7f90824ab92a822e65fd/websockets-14.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5b918d288958dc3fa1c5a0b9aa3256cb2b2b84c54407f4813c45d52267600cd3", size = 159857 }, + { url = "https://files.pythonhosted.org/packages/dd/c8/d7b425011a15e35e17757e4df75b25e1d0df64c0c315a44550454eaf88fc/websockets-14.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00fe5da3f037041da1ee0cf8e308374e236883f9842c7c465aa65098b1c9af59", size = 169635 }, + { url = "https://files.pythonhosted.org/packages/93/39/6e3b5cffa11036c40bd2f13aba2e8e691ab2e01595532c46437b56575678/websockets-14.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8149a0f5a72ca36720981418eeffeb5c2729ea55fa179091c81a0910a114a5d2", size = 168578 }, + { url = "https://files.pythonhosted.org/packages/cf/03/8faa5c9576299b2adf34dcccf278fc6bbbcda8a3efcc4d817369026be421/websockets-14.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77569d19a13015e840b81550922056acabc25e3f52782625bc6843cfa034e1da", size = 169018 }, + { url = "https://files.pythonhosted.org/packages/8c/05/ea1fec05cc3a60defcdf0bb9f760c3c6bd2dd2710eff7ac7f891864a22ba/websockets-14.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cf5201a04550136ef870aa60ad3d29d2a59e452a7f96b94193bee6d73b8ad9a9", size = 169383 }, + { url = "https://files.pythonhosted.org/packages/21/1d/eac1d9ed787f80754e51228e78855f879ede1172c8b6185aca8cef494911/websockets-14.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:88cf9163ef674b5be5736a584c999e98daf3aabac6e536e43286eb74c126b9c7", size = 168773 }, + { url = "https://files.pythonhosted.org/packages/0e/1b/e808685530185915299740d82b3a4af3f2b44e56ccf4389397c7a5d95d39/websockets-14.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:836bef7ae338a072e9d1863502026f01b14027250a4545672673057997d5c05a", size = 168757 }, + { url = 
"https://files.pythonhosted.org/packages/b6/19/6ab716d02a3b068fbbeb6face8a7423156e12c446975312f1c7c0f4badab/websockets-14.1-cp313-cp313-win32.whl", hash = "sha256:0d4290d559d68288da9f444089fd82490c8d2744309113fc26e2da6e48b65da6", size = 162834 }, + { url = "https://files.pythonhosted.org/packages/6c/fd/ab6b7676ba712f2fc89d1347a4b5bdc6aa130de10404071f2b2606450209/websockets-14.1-cp313-cp313-win_amd64.whl", hash = "sha256:8621a07991add373c3c5c2cf89e1d277e49dc82ed72c75e3afc74bd0acc446f0", size = 163277 }, + { url = "https://files.pythonhosted.org/packages/4d/23/ac9d8c5ec7b90efc3687d60474ef7e698f8b75cb7c9dfedad72701e797c9/websockets-14.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:01bb2d4f0a6d04538d3c5dfd27c0643269656c28045a53439cbf1c004f90897a", size = 161945 }, + { url = "https://files.pythonhosted.org/packages/c5/6b/ffa450e3b736a86ae6b40ce20a758ac9af80c96a18548f6c323ed60329c5/websockets-14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:414ffe86f4d6f434a8c3b7913655a1a5383b617f9bf38720e7c0799fac3ab1c6", size = 159600 }, + { url = "https://files.pythonhosted.org/packages/74/62/f90d1fd57ea7337ecaa99f17c31a544b9dcdb7c7c32a3d3997ccc42d57d3/websockets-14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8fda642151d5affdee8a430bd85496f2e2517be3a2b9d2484d633d5712b15c56", size = 159850 }, + { url = "https://files.pythonhosted.org/packages/35/dd/1e71865de1f3c265e11d02b0b4c76178f84351c6611e515fbe3d2bd1b98c/websockets-14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd7c11968bc3860d5c78577f0dbc535257ccec41750675d58d8dc66aa47fe52c", size = 168616 }, + { url = "https://files.pythonhosted.org/packages/ba/ae/0d069b52e26d48402dbe90c7581eb6a5bed5d7dbe3d9ca3cf1033859d58e/websockets-14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a032855dc7db987dff813583d04f4950d14326665d7e714d584560b140ae6b8b", size = 167619 }, + { url = "https://files.pythonhosted.org/packages/1c/3f/d3f2df62704c53e0296f0ce714921b6a15df10e2e463734c737b1d9e2522/websockets-14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7e7ea2f782408c32d86b87a0d2c1fd8871b0399dd762364c731d86c86069a78", size = 167921 }, + { url = "https://files.pythonhosted.org/packages/e0/e2/2dcb295bdae9393070cea58c790d87d1d36149bb4319b1da6014c8a36d42/websockets-14.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:39450e6215f7d9f6f7bc2a6da21d79374729f5d052333da4d5825af8a97e6735", size = 168343 }, + { url = "https://files.pythonhosted.org/packages/6b/fd/fa48e8b4e10e2c165cbfc16dada7405b4008818be490fc6b99a4928e232a/websockets-14.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ceada5be22fa5a5a4cdeec74e761c2ee7db287208f54c718f2df4b7e200b8d4a", size = 167745 }, + { url = "https://files.pythonhosted.org/packages/42/45/79db33f2b744d2014b40946428e6c37ce944fde8791d82e1c2f4d4a67d96/websockets-14.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3fc753451d471cff90b8f467a1fc0ae64031cf2d81b7b34e1811b7e2691bc4bc", size = 167705 }, + { url = "https://files.pythonhosted.org/packages/da/27/f66507db34ca9c79562f28fa5983433f7b9080fd471cc188906006d36ba4/websockets-14.1-cp39-cp39-win32.whl", hash = "sha256:14839f54786987ccd9d03ed7f334baec0f02272e7ec4f6e9d427ff584aeea8b4", size = 162828 }, + { url = "https://files.pythonhosted.org/packages/11/25/bb8f81a4ec94f595adb845608c5ec9549cb6b446945b292fe61807c7c95b/websockets-14.1-cp39-cp39-win_amd64.whl", hash = 
"sha256:d9fd19ecc3a4d5ae82ddbfb30962cf6d874ff943e56e0c81f5169be2fda62979", size = 163271 }, + { url = "https://files.pythonhosted.org/packages/fb/cd/382a05a1ba2a93bd9fb807716a660751295df72e77204fb130a102fcdd36/websockets-14.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e5dc25a9dbd1a7f61eca4b7cb04e74ae4b963d658f9e4f9aad9cd00b688692c8", size = 159633 }, + { url = "https://files.pythonhosted.org/packages/b7/a0/fa7c62e2952ef028b422fbf420f9353d9dd4dfaa425de3deae36e98c0784/websockets-14.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:04a97aca96ca2acedf0d1f332c861c5a4486fdcba7bcef35873820f940c4231e", size = 159867 }, + { url = "https://files.pythonhosted.org/packages/c1/94/954b4924f868db31d5f0935893c7a8446515ee4b36bb8ad75a929469e453/websockets-14.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df174ece723b228d3e8734a6f2a6febbd413ddec39b3dc592f5a4aa0aff28098", size = 161121 }, + { url = "https://files.pythonhosted.org/packages/7a/2e/f12bbb41a8f2abb76428ba4fdcd9e67b5b364a3e7fa97c88f4d6950aa2d4/websockets-14.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:034feb9f4286476f273b9a245fb15f02c34d9586a5bc936aff108c3ba1b21beb", size = 160731 }, + { url = "https://files.pythonhosted.org/packages/13/97/b76979401f2373af1fe3e08f960b265cecab112e7dac803446fb98351a52/websockets-14.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:660c308dabd2b380807ab64b62985eaccf923a78ebc572bd485375b9ca2b7dc7", size = 160681 }, + { url = "https://files.pythonhosted.org/packages/39/9c/16916d9a436c109a1d7ba78817e8fee357b78968be3f6e6f517f43afa43d/websockets-14.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5a42d3ecbb2db5080fc578314439b1d79eef71d323dc661aa616fb492436af5d", size = 163316 }, + { url = "https://files.pythonhosted.org/packages/0f/57/50fd09848a80a1b63a572c610f230f8a17590ca47daf256eb28a0851df73/websockets-14.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:ddaa4a390af911da6f680be8be4ff5aaf31c4c834c1a9147bc21cbcbca2d4370", size = 159633 }, + { url = "https://files.pythonhosted.org/packages/d7/2f/db728b0c7962ad6a13ced8286325bf430b59722d943e7f6bdbd8a78e2bfe/websockets-14.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a4c805c6034206143fbabd2d259ec5e757f8b29d0a2f0bf3d2fe5d1f60147a4a", size = 159863 }, + { url = "https://files.pythonhosted.org/packages/fa/e4/21e7481936fbfffee138edb488a6184eb3468b402a8181b95b9e44f6a676/websockets-14.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:205f672a6c2c671a86d33f6d47c9b35781a998728d2c7c2a3e1cf3333fcb62b7", size = 161119 }, + { url = "https://files.pythonhosted.org/packages/64/2d/efb6cf716d4f9da60190756e06f8db2066faf1ae4a4a8657ab136dfcc7a8/websockets-14.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef440054124728cc49b01c33469de06755e5a7a4e83ef61934ad95fc327fbb0", size = 160724 }, + { url = "https://files.pythonhosted.org/packages/40/b0/a70b972d853c3f26040834fcff3dd45c8a0292af9f5f0b36f9fbb82d5d44/websockets-14.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7591d6f440af7f73c4bd9404f3772bfee064e639d2b6cc8c94076e71b2471c1", size = 160676 }, + { url = 
"https://files.pythonhosted.org/packages/4a/76/f9da7f97476cc7b8c74829bb4851f1faf660455839689ffcc354b52860a7/websockets-14.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:25225cc79cfebc95ba1d24cd3ab86aaa35bcd315d12fa4358939bd55e9bd74a5", size = 163311 }, + { url = "https://files.pythonhosted.org/packages/b0/0b/c7e5d11020242984d9d37990310520ed663b942333b83a033c2f20191113/websockets-14.1-py3-none-any.whl", hash = "sha256:4d4fc827a20abe6d544a119896f6b78ee13fe81cbfef416f3f2ddf09a03f0e2e", size = 156277 }, +] + +[[package]] +name = "winkerberos" +version = "0.12.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2d/4f/8db9aae372e88031877067a9d8da027d6e67454d233177cb49198ab216a5/winkerberos-0.12.0.tar.gz", hash = "sha256:b19b9b8c87ab9dc76bb325f0dd4e93a2d669abc68d2283eec25ed67176ad7ad3", size = 35572 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7b/71/69549a95c4077a35819b04f3179292eec7119903ec035995254a41a3622a/winkerberos-0.12.0-cp310-cp310-win32.whl", hash = "sha256:bb37e91f9959adbeb3c6ae25c828c1d033fa2b1b03176037d7bec0adfbb85b8f", size = 25297 }, + { url = "https://files.pythonhosted.org/packages/8d/47/c8e2138e51201f79f9adc73a13a6616c375d0490081b124e2d8eebf21711/winkerberos-0.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:e479a498ab1f93bde0c0eb880f2c68378272850db51b978c75e9d73148c44f9c", size = 27635 }, + { url = "https://files.pythonhosted.org/packages/bf/5b/5799a0b7b3162b4476443b16c7a12a63ec3dbd9e9e2bf622c5833c27079b/winkerberos-0.12.0-cp311-cp311-win32.whl", hash = "sha256:35ed9eedc2551063758756724c345d906b4a68b8d31bc9fd6e935c1eb37c4a35", size = 25297 }, + { url = "https://files.pythonhosted.org/packages/24/ec/d437a005207d3c66bdb22196f954d25716fea21b79d4873873a2cd836946/winkerberos-0.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:838fdab8f71905c5a80ee1c868e2c7f3c2fee233113e8e65cd989b353e9a980e", size = 27640 }, + { url = "https://files.pythonhosted.org/packages/12/6f/1cab2c1685c3cb55a5a6b87c75df33def11b25cf01525021fa4f18c2ba24/winkerberos-0.12.0-cp312-cp312-win32.whl", hash = "sha256:f8a9dedd35eda764cd0591d050234a8f381c57a559c16a914de311ed426f6f50", size = 25365 }, + { url = "https://files.pythonhosted.org/packages/01/e9/0408c1abd6d599d61709ceecafdb0f8ff725e015b8c5444db62de6466b37/winkerberos-0.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:22db6871a842d16bb045d93440d0acc98d7690320acd7d7174ae36509ce78198", size = 27678 }, + { url = "https://files.pythonhosted.org/packages/7a/ff/b6cd850e9bed012d289cbcf1a2c9f70292c6d2664f65c0b6741877f0f7ec/winkerberos-0.12.0-cp39-cp39-win32.whl", hash = "sha256:987a16e5fff8b6e1cd2d1a52db92c51ba657a34e6c55b0b7d96247f512ed7444", size = 25290 }, + { url = "https://files.pythonhosted.org/packages/85/a9/c2319bcf270170ddb9c52105851d7565e6ce7266dc5a3e6cdf97fb6fe43b/winkerberos-0.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:4ffe1b654884e169c88785aa3960cc8dc4f09b757d242b59b3022c632736d2cd", size = 27629 }, +] + +[[package]] +name = "zipp" +version = "3.21.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3f/50/bad581df71744867e9468ebd0bcd6505de3b275e06f202c2cb016e3ff56f/zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4", size = 24545 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/1a/7e4798e9339adc931158c9d69ecc34f5e6791489d469f5e50ec15e35f458/zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931", 
size = 9630 }, +] + +[[package]] +name = "zope-event" +version = "5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "setuptools" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/46/c2/427f1867bb96555d1d34342f1dd97f8c420966ab564d58d18469a1db8736/zope.event-5.0.tar.gz", hash = "sha256:bac440d8d9891b4068e2b5a2c5e2c9765a9df762944bda6955f96bb9b91e67cd", size = 17350 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fe/42/f8dbc2b9ad59e927940325a22d6d3931d630c3644dae7e2369ef5d9ba230/zope.event-5.0-py3-none-any.whl", hash = "sha256:2832e95014f4db26c47a13fdaef84cef2f4df37e66b59d8f1f4a8f319a632c26", size = 6824 }, +] + +[[package]] +name = "zope-interface" +version = "7.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "setuptools" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/30/93/9210e7606be57a2dfc6277ac97dcc864fd8d39f142ca194fdc186d596fda/zope.interface-7.2.tar.gz", hash = "sha256:8b49f1a3d1ee4cdaf5b32d2e738362c7f5e40ac8b46dd7d1a65e82a4872728fe", size = 252960 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/71/e6177f390e8daa7e75378505c5ab974e0bf59c1d3b19155638c7afbf4b2d/zope.interface-7.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ce290e62229964715f1011c3dbeab7a4a1e4971fd6f31324c4519464473ef9f2", size = 208243 }, + { url = "https://files.pythonhosted.org/packages/52/db/7e5f4226bef540f6d55acfd95cd105782bc6ee044d9b5587ce2c95558a5e/zope.interface-7.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:05b910a5afe03256b58ab2ba6288960a2892dfeef01336dc4be6f1b9ed02ab0a", size = 208759 }, + { url = "https://files.pythonhosted.org/packages/28/ea/fdd9813c1eafd333ad92464d57a4e3a82b37ae57c19497bcffa42df673e4/zope.interface-7.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:550f1c6588ecc368c9ce13c44a49b8d6b6f3ca7588873c679bd8fd88a1b557b6", size = 254922 }, + { url = "https://files.pythonhosted.org/packages/3b/d3/0000a4d497ef9fbf4f66bb6828b8d0a235e690d57c333be877bec763722f/zope.interface-7.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0ef9e2f865721553c6f22a9ff97da0f0216c074bd02b25cf0d3af60ea4d6931d", size = 249367 }, + { url = "https://files.pythonhosted.org/packages/3e/e5/0b359e99084f033d413419eff23ee9c2bd33bca2ca9f4e83d11856f22d10/zope.interface-7.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27f926f0dcb058211a3bb3e0e501c69759613b17a553788b2caeb991bed3b61d", size = 254488 }, + { url = "https://files.pythonhosted.org/packages/7b/90/12d50b95f40e3b2fc0ba7f7782104093b9fd62806b13b98ef4e580f2ca61/zope.interface-7.2-cp310-cp310-win_amd64.whl", hash = "sha256:144964649eba4c5e4410bb0ee290d338e78f179cdbfd15813de1a664e7649b3b", size = 211947 }, + { url = "https://files.pythonhosted.org/packages/98/7d/2e8daf0abea7798d16a58f2f3a2bf7588872eee54ac119f99393fdd47b65/zope.interface-7.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1909f52a00c8c3dcab6c4fad5d13de2285a4b3c7be063b239b8dc15ddfb73bd2", size = 208776 }, + { url = "https://files.pythonhosted.org/packages/a0/2a/0c03c7170fe61d0d371e4c7ea5b62b8cb79b095b3d630ca16719bf8b7b18/zope.interface-7.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:80ecf2451596f19fd607bb09953f426588fc1e79e93f5968ecf3367550396b22", size = 209296 }, + { url = 
"https://files.pythonhosted.org/packages/49/b4/451f19448772b4a1159519033a5f72672221e623b0a1bd2b896b653943d8/zope.interface-7.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:033b3923b63474800b04cba480b70f6e6243a62208071fc148354f3f89cc01b7", size = 260997 }, + { url = "https://files.pythonhosted.org/packages/65/94/5aa4461c10718062c8f8711161faf3249d6d3679c24a0b81dd6fc8ba1dd3/zope.interface-7.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a102424e28c6b47c67923a1f337ede4a4c2bba3965b01cf707978a801fc7442c", size = 255038 }, + { url = "https://files.pythonhosted.org/packages/9f/aa/1a28c02815fe1ca282b54f6705b9ddba20328fabdc37b8cf73fc06b172f0/zope.interface-7.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25e6a61dcb184453bb00eafa733169ab6d903e46f5c2ace4ad275386f9ab327a", size = 259806 }, + { url = "https://files.pythonhosted.org/packages/a7/2c/82028f121d27c7e68632347fe04f4a6e0466e77bb36e104c8b074f3d7d7b/zope.interface-7.2-cp311-cp311-win_amd64.whl", hash = "sha256:3f6771d1647b1fc543d37640b45c06b34832a943c80d1db214a37c31161a93f1", size = 212305 }, + { url = "https://files.pythonhosted.org/packages/68/0b/c7516bc3bad144c2496f355e35bd699443b82e9437aa02d9867653203b4a/zope.interface-7.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:086ee2f51eaef1e4a52bd7d3111a0404081dadae87f84c0ad4ce2649d4f708b7", size = 208959 }, + { url = "https://files.pythonhosted.org/packages/a2/e9/1463036df1f78ff8c45a02642a7bf6931ae4a38a4acd6a8e07c128e387a7/zope.interface-7.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:21328fcc9d5b80768bf051faa35ab98fb979080c18e6f84ab3f27ce703bce465", size = 209357 }, + { url = "https://files.pythonhosted.org/packages/07/a8/106ca4c2add440728e382f1b16c7d886563602487bdd90004788d45eb310/zope.interface-7.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6dd02ec01f4468da0f234da9d9c8545c5412fef80bc590cc51d8dd084138a89", size = 264235 }, + { url = "https://files.pythonhosted.org/packages/fc/ca/57286866285f4b8a4634c12ca1957c24bdac06eae28fd4a3a578e30cf906/zope.interface-7.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8e7da17f53e25d1a3bde5da4601e026adc9e8071f9f6f936d0fe3fe84ace6d54", size = 259253 }, + { url = "https://files.pythonhosted.org/packages/96/08/2103587ebc989b455cf05e858e7fbdfeedfc3373358320e9c513428290b1/zope.interface-7.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cab15ff4832580aa440dc9790b8a6128abd0b88b7ee4dd56abacbc52f212209d", size = 264702 }, + { url = "https://files.pythonhosted.org/packages/5f/c7/3c67562e03b3752ba4ab6b23355f15a58ac2d023a6ef763caaca430f91f2/zope.interface-7.2-cp312-cp312-win_amd64.whl", hash = "sha256:29caad142a2355ce7cfea48725aa8bcf0067e2b5cc63fcf5cd9f97ad12d6afb5", size = 212466 }, + { url = "https://files.pythonhosted.org/packages/c6/3b/e309d731712c1a1866d61b5356a069dd44e5b01e394b6cb49848fa2efbff/zope.interface-7.2-cp313-cp313-macosx_10_9_x86_64.whl", hash = "sha256:3e0350b51e88658d5ad126c6a57502b19d5f559f6cb0a628e3dc90442b53dd98", size = 208961 }, + { url = "https://files.pythonhosted.org/packages/49/65/78e7cebca6be07c8fc4032bfbb123e500d60efdf7b86727bb8a071992108/zope.interface-7.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:15398c000c094b8855d7d74f4fdc9e73aa02d4d0d5c775acdef98cdb1119768d", size = 209356 }, + { url = 
"https://files.pythonhosted.org/packages/11/b1/627384b745310d082d29e3695db5f5a9188186676912c14b61a78bbc6afe/zope.interface-7.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:802176a9f99bd8cc276dcd3b8512808716492f6f557c11196d42e26c01a69a4c", size = 264196 }, + { url = "https://files.pythonhosted.org/packages/b8/f6/54548df6dc73e30ac6c8a7ff1da73ac9007ba38f866397091d5a82237bd3/zope.interface-7.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb23f58a446a7f09db85eda09521a498e109f137b85fb278edb2e34841055398", size = 259237 }, + { url = "https://files.pythonhosted.org/packages/b6/66/ac05b741c2129fdf668b85631d2268421c5cd1a9ff99be1674371139d665/zope.interface-7.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a71a5b541078d0ebe373a81a3b7e71432c61d12e660f1d67896ca62d9628045b", size = 264696 }, + { url = "https://files.pythonhosted.org/packages/0a/2f/1bccc6f4cc882662162a1158cda1a7f616add2ffe322b28c99cb031b4ffc/zope.interface-7.2-cp313-cp313-win_amd64.whl", hash = "sha256:4893395d5dd2ba655c38ceb13014fd65667740f09fa5bb01caa1e6284e48c0cd", size = 212472 }, + { url = "https://files.pythonhosted.org/packages/8c/2c/1f49dc8b4843c4f0848d8e43191aed312bad946a1563d1bf9e46cf2816ee/zope.interface-7.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7bd449c306ba006c65799ea7912adbbfed071089461a19091a228998b82b1fdb", size = 208349 }, + { url = "https://files.pythonhosted.org/packages/ed/7d/83ddbfc8424c69579a90fc8edc2b797223da2a8083a94d8dfa0e374c5ed4/zope.interface-7.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a19a6cc9c6ce4b1e7e3d319a473cf0ee989cbbe2b39201d7c19e214d2dfb80c7", size = 208799 }, + { url = "https://files.pythonhosted.org/packages/36/22/b1abd91854c1be03f5542fe092e6a745096d2eca7704d69432e119100583/zope.interface-7.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:72cd1790b48c16db85d51fbbd12d20949d7339ad84fd971427cf00d990c1f137", size = 254267 }, + { url = "https://files.pythonhosted.org/packages/2a/dd/fcd313ee216ad0739ae00e6126bc22a0af62a74f76a9ca668d16cd276222/zope.interface-7.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52e446f9955195440e787596dccd1411f543743c359eeb26e9b2c02b077b0519", size = 248614 }, + { url = "https://files.pythonhosted.org/packages/88/d4/4ba1569b856870527cec4bf22b91fe704b81a3c1a451b2ccf234e9e0666f/zope.interface-7.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ad9913fd858274db8dd867012ebe544ef18d218f6f7d1e3c3e6d98000f14b75", size = 253800 }, + { url = "https://files.pythonhosted.org/packages/69/da/c9cfb384c18bd3a26d9fc6a9b5f32ccea49ae09444f097eaa5ca9814aff9/zope.interface-7.2-cp39-cp39-win_amd64.whl", hash = "sha256:1090c60116b3da3bfdd0c03406e2f14a1ff53e5771aebe33fec1edc0a350175d", size = 211980 }, +] + +[[package]] +name = "zstandard" +version = "0.23.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation == 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ed/f6/2ac0287b442160a89d726b17a9184a4c615bb5237db763791a7fd16d9df1/zstandard-0.23.0.tar.gz", hash = "sha256:b2d8c62d08e7255f68f7a740bae85b3c9b8e5466baa9cbf7f57f1cde0ac6bc09", size = 681701 } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/2a/55/bd0487e86679db1823fc9ee0d8c9c78ae2413d34c0b461193b5f4c31d22f/zstandard-0.23.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bf0a05b6059c0528477fba9054d09179beb63744355cab9f38059548fedd46a9", size = 788701 }, + { url = "https://files.pythonhosted.org/packages/e1/8a/ccb516b684f3ad987dfee27570d635822e3038645b1a950c5e8022df1145/zstandard-0.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fc9ca1c9718cb3b06634c7c8dec57d24e9438b2aa9a0f02b8bb36bf478538880", size = 633678 }, + { url = "https://files.pythonhosted.org/packages/12/89/75e633d0611c028e0d9af6df199423bf43f54bea5007e6718ab7132e234c/zstandard-0.23.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77da4c6bfa20dd5ea25cbf12c76f181a8e8cd7ea231c673828d0386b1740b8dc", size = 4941098 }, + { url = "https://files.pythonhosted.org/packages/4a/7a/bd7f6a21802de358b63f1ee636ab823711c25ce043a3e9f043b4fcb5ba32/zstandard-0.23.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2170c7e0367dde86a2647ed5b6f57394ea7f53545746104c6b09fc1f4223573", size = 5308798 }, + { url = "https://files.pythonhosted.org/packages/79/3b/775f851a4a65013e88ca559c8ae42ac1352db6fcd96b028d0df4d7d1d7b4/zstandard-0.23.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c16842b846a8d2a145223f520b7e18b57c8f476924bda92aeee3a88d11cfc391", size = 5341840 }, + { url = "https://files.pythonhosted.org/packages/09/4f/0cc49570141dd72d4d95dd6fcf09328d1b702c47a6ec12fbed3b8aed18a5/zstandard-0.23.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:157e89ceb4054029a289fb504c98c6a9fe8010f1680de0201b3eb5dc20aa6d9e", size = 5440337 }, + { url = "https://files.pythonhosted.org/packages/e7/7c/aaa7cd27148bae2dc095191529c0570d16058c54c4597a7d118de4b21676/zstandard-0.23.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:203d236f4c94cd8379d1ea61db2fce20730b4c38d7f1c34506a31b34edc87bdd", size = 4861182 }, + { url = "https://files.pythonhosted.org/packages/ac/eb/4b58b5c071d177f7dc027129d20bd2a44161faca6592a67f8fcb0b88b3ae/zstandard-0.23.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dc5d1a49d3f8262be192589a4b72f0d03b72dcf46c51ad5852a4fdc67be7b9e4", size = 4932936 }, + { url = "https://files.pythonhosted.org/packages/44/f9/21a5fb9bb7c9a274b05ad700a82ad22ce82f7ef0f485980a1e98ed6e8c5f/zstandard-0.23.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:752bf8a74412b9892f4e5b58f2f890a039f57037f52c89a740757ebd807f33ea", size = 5464705 }, + { url = "https://files.pythonhosted.org/packages/49/74/b7b3e61db3f88632776b78b1db597af3f44c91ce17d533e14a25ce6a2816/zstandard-0.23.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:80080816b4f52a9d886e67f1f96912891074903238fe54f2de8b786f86baded2", size = 4857882 }, + { url = "https://files.pythonhosted.org/packages/4a/7f/d8eb1cb123d8e4c541d4465167080bec88481ab54cd0b31eb4013ba04b95/zstandard-0.23.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:84433dddea68571a6d6bd4fbf8ff398236031149116a7fff6f777ff95cad3df9", size = 4697672 }, + { url = "https://files.pythonhosted.org/packages/5e/05/f7dccdf3d121309b60342da454d3e706453a31073e2c4dac8e1581861e44/zstandard-0.23.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ab19a2d91963ed9e42b4e8d77cd847ae8381576585bad79dbd0a8837a9f6620a", size = 5206043 }, + { url = 
"https://files.pythonhosted.org/packages/86/9d/3677a02e172dccd8dd3a941307621c0cbd7691d77cb435ac3c75ab6a3105/zstandard-0.23.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:59556bf80a7094d0cfb9f5e50bb2db27fefb75d5138bb16fb052b61b0e0eeeb0", size = 5667390 }, + { url = "https://files.pythonhosted.org/packages/41/7e/0012a02458e74a7ba122cd9cafe491facc602c9a17f590367da369929498/zstandard-0.23.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:27d3ef2252d2e62476389ca8f9b0cf2bbafb082a3b6bfe9d90cbcbb5529ecf7c", size = 5198901 }, + { url = "https://files.pythonhosted.org/packages/65/3a/8f715b97bd7bcfc7342d8adcd99a026cb2fb550e44866a3b6c348e1b0f02/zstandard-0.23.0-cp310-cp310-win32.whl", hash = "sha256:5d41d5e025f1e0bccae4928981e71b2334c60f580bdc8345f824e7c0a4c2a813", size = 430596 }, + { url = "https://files.pythonhosted.org/packages/19/b7/b2b9eca5e5a01111e4fe8a8ffb56bdcdf56b12448a24effe6cfe4a252034/zstandard-0.23.0-cp310-cp310-win_amd64.whl", hash = "sha256:519fbf169dfac1222a76ba8861ef4ac7f0530c35dd79ba5727014613f91613d4", size = 495498 }, + { url = "https://files.pythonhosted.org/packages/9e/40/f67e7d2c25a0e2dc1744dd781110b0b60306657f8696cafb7ad7579469bd/zstandard-0.23.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:34895a41273ad33347b2fc70e1bff4240556de3c46c6ea430a7ed91f9042aa4e", size = 788699 }, + { url = "https://files.pythonhosted.org/packages/e8/46/66d5b55f4d737dd6ab75851b224abf0afe5774976fe511a54d2eb9063a41/zstandard-0.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:77ea385f7dd5b5676d7fd943292ffa18fbf5c72ba98f7d09fc1fb9e819b34c23", size = 633681 }, + { url = "https://files.pythonhosted.org/packages/63/b6/677e65c095d8e12b66b8f862b069bcf1f1d781b9c9c6f12eb55000d57583/zstandard-0.23.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:983b6efd649723474f29ed42e1467f90a35a74793437d0bc64a5bf482bedfa0a", size = 4944328 }, + { url = "https://files.pythonhosted.org/packages/59/cc/e76acb4c42afa05a9d20827116d1f9287e9c32b7ad58cc3af0721ce2b481/zstandard-0.23.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80a539906390591dd39ebb8d773771dc4db82ace6372c4d41e2d293f8e32b8db", size = 5311955 }, + { url = "https://files.pythonhosted.org/packages/78/e4/644b8075f18fc7f632130c32e8f36f6dc1b93065bf2dd87f03223b187f26/zstandard-0.23.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:445e4cb5048b04e90ce96a79b4b63140e3f4ab5f662321975679b5f6360b90e2", size = 5344944 }, + { url = "https://files.pythonhosted.org/packages/76/3f/dbafccf19cfeca25bbabf6f2dd81796b7218f768ec400f043edc767015a6/zstandard-0.23.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd30d9c67d13d891f2360b2a120186729c111238ac63b43dbd37a5a40670b8ca", size = 5442927 }, + { url = "https://files.pythonhosted.org/packages/0c/c3/d24a01a19b6733b9f218e94d1a87c477d523237e07f94899e1c10f6fd06c/zstandard-0.23.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d20fd853fbb5807c8e84c136c278827b6167ded66c72ec6f9a14b863d809211c", size = 4864910 }, + { url = "https://files.pythonhosted.org/packages/1c/a9/cf8f78ead4597264f7618d0875be01f9bc23c9d1d11afb6d225b867cb423/zstandard-0.23.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ed1708dbf4d2e3a1c5c69110ba2b4eb6678262028afd6c6fbcc5a8dac9cda68e", size = 4935544 }, + { url = 
"https://files.pythonhosted.org/packages/2c/96/8af1e3731b67965fb995a940c04a2c20997a7b3b14826b9d1301cf160879/zstandard-0.23.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:be9b5b8659dff1f913039c2feee1aca499cfbc19e98fa12bc85e037c17ec6ca5", size = 5467094 }, + { url = "https://files.pythonhosted.org/packages/ff/57/43ea9df642c636cb79f88a13ab07d92d88d3bfe3e550b55a25a07a26d878/zstandard-0.23.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:65308f4b4890aa12d9b6ad9f2844b7ee42c7f7a4fd3390425b242ffc57498f48", size = 4860440 }, + { url = "https://files.pythonhosted.org/packages/46/37/edb78f33c7f44f806525f27baa300341918fd4c4af9472fbc2c3094be2e8/zstandard-0.23.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:98da17ce9cbf3bfe4617e836d561e433f871129e3a7ac16d6ef4c680f13a839c", size = 4700091 }, + { url = "https://files.pythonhosted.org/packages/c1/f1/454ac3962671a754f3cb49242472df5c2cced4eb959ae203a377b45b1a3c/zstandard-0.23.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:8ed7d27cb56b3e058d3cf684d7200703bcae623e1dcc06ed1e18ecda39fee003", size = 5208682 }, + { url = "https://files.pythonhosted.org/packages/85/b2/1734b0fff1634390b1b887202d557d2dd542de84a4c155c258cf75da4773/zstandard-0.23.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:b69bb4f51daf461b15e7b3db033160937d3ff88303a7bc808c67bbc1eaf98c78", size = 5669707 }, + { url = "https://files.pythonhosted.org/packages/52/5a/87d6971f0997c4b9b09c495bf92189fb63de86a83cadc4977dc19735f652/zstandard-0.23.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:034b88913ecc1b097f528e42b539453fa82c3557e414b3de9d5632c80439a473", size = 5201792 }, + { url = "https://files.pythonhosted.org/packages/79/02/6f6a42cc84459d399bd1a4e1adfc78d4dfe45e56d05b072008d10040e13b/zstandard-0.23.0-cp311-cp311-win32.whl", hash = "sha256:f2d4380bf5f62daabd7b751ea2339c1a21d1c9463f1feb7fc2bdcea2c29c3160", size = 430586 }, + { url = "https://files.pythonhosted.org/packages/be/a2/4272175d47c623ff78196f3c10e9dc7045c1b9caf3735bf041e65271eca4/zstandard-0.23.0-cp311-cp311-win_amd64.whl", hash = "sha256:62136da96a973bd2557f06ddd4e8e807f9e13cbb0bfb9cc06cfe6d98ea90dfe0", size = 495420 }, + { url = "https://files.pythonhosted.org/packages/7b/83/f23338c963bd9de687d47bf32efe9fd30164e722ba27fb59df33e6b1719b/zstandard-0.23.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b4567955a6bc1b20e9c31612e615af6b53733491aeaa19a6b3b37f3b65477094", size = 788713 }, + { url = "https://files.pythonhosted.org/packages/5b/b3/1a028f6750fd9227ee0b937a278a434ab7f7fdc3066c3173f64366fe2466/zstandard-0.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e172f57cd78c20f13a3415cc8dfe24bf388614324d25539146594c16d78fcc8", size = 633459 }, + { url = "https://files.pythonhosted.org/packages/26/af/36d89aae0c1f95a0a98e50711bc5d92c144939efc1f81a2fcd3e78d7f4c1/zstandard-0.23.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0e166f698c5a3e914947388c162be2583e0c638a4703fc6a543e23a88dea3c1", size = 4945707 }, + { url = "https://files.pythonhosted.org/packages/cd/2e/2051f5c772f4dfc0aae3741d5fc72c3dcfe3aaeb461cc231668a4db1ce14/zstandard-0.23.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12a289832e520c6bd4dcaad68e944b86da3bad0d339ef7989fb7e88f92e96072", size = 5306545 }, + { url = "https://files.pythonhosted.org/packages/0a/9e/a11c97b087f89cab030fa71206963090d2fecd8eb83e67bb8f3ffb84c024/zstandard-0.23.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:d50d31bfedd53a928fed6707b15a8dbeef011bb6366297cc435accc888b27c20", size = 5337533 }, + { url = "https://files.pythonhosted.org/packages/fc/79/edeb217c57fe1bf16d890aa91a1c2c96b28c07b46afed54a5dcf310c3f6f/zstandard-0.23.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72c68dda124a1a138340fb62fa21b9bf4848437d9ca60bd35db36f2d3345f373", size = 5436510 }, + { url = "https://files.pythonhosted.org/packages/81/4f/c21383d97cb7a422ddf1ae824b53ce4b51063d0eeb2afa757eb40804a8ef/zstandard-0.23.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53dd9d5e3d29f95acd5de6802e909ada8d8d8cfa37a3ac64836f3bc4bc5512db", size = 4859973 }, + { url = "https://files.pythonhosted.org/packages/ab/15/08d22e87753304405ccac8be2493a495f529edd81d39a0870621462276ef/zstandard-0.23.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6a41c120c3dbc0d81a8e8adc73312d668cd34acd7725f036992b1b72d22c1772", size = 4936968 }, + { url = "https://files.pythonhosted.org/packages/eb/fa/f3670a597949fe7dcf38119a39f7da49a8a84a6f0b1a2e46b2f71a0ab83f/zstandard-0.23.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:40b33d93c6eddf02d2c19f5773196068d875c41ca25730e8288e9b672897c105", size = 5467179 }, + { url = "https://files.pythonhosted.org/packages/4e/a9/dad2ab22020211e380adc477a1dbf9f109b1f8d94c614944843e20dc2a99/zstandard-0.23.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9206649ec587e6b02bd124fb7799b86cddec350f6f6c14bc82a2b70183e708ba", size = 4848577 }, + { url = "https://files.pythonhosted.org/packages/08/03/dd28b4484b0770f1e23478413e01bee476ae8227bbc81561f9c329e12564/zstandard-0.23.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76e79bc28a65f467e0409098fa2c4376931fd3207fbeb6b956c7c476d53746dd", size = 4693899 }, + { url = "https://files.pythonhosted.org/packages/2b/64/3da7497eb635d025841e958bcd66a86117ae320c3b14b0ae86e9e8627518/zstandard-0.23.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:66b689c107857eceabf2cf3d3fc699c3c0fe8ccd18df2219d978c0283e4c508a", size = 5199964 }, + { url = "https://files.pythonhosted.org/packages/43/a4/d82decbab158a0e8a6ebb7fc98bc4d903266bce85b6e9aaedea1d288338c/zstandard-0.23.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9c236e635582742fee16603042553d276cca506e824fa2e6489db04039521e90", size = 5655398 }, + { url = "https://files.pythonhosted.org/packages/f2/61/ac78a1263bc83a5cf29e7458b77a568eda5a8f81980691bbc6eb6a0d45cc/zstandard-0.23.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a8fffdbd9d1408006baaf02f1068d7dd1f016c6bcb7538682622c556e7b68e35", size = 5191313 }, + { url = "https://files.pythonhosted.org/packages/e7/54/967c478314e16af5baf849b6ee9d6ea724ae5b100eb506011f045d3d4e16/zstandard-0.23.0-cp312-cp312-win32.whl", hash = "sha256:dc1d33abb8a0d754ea4763bad944fd965d3d95b5baef6b121c0c9013eaf1907d", size = 430877 }, + { url = "https://files.pythonhosted.org/packages/75/37/872d74bd7739639c4553bf94c84af7d54d8211b626b352bc57f0fd8d1e3f/zstandard-0.23.0-cp312-cp312-win_amd64.whl", hash = "sha256:64585e1dba664dc67c7cdabd56c1e5685233fbb1fc1966cfba2a340ec0dfff7b", size = 495595 }, + { url = "https://files.pythonhosted.org/packages/80/f1/8386f3f7c10261fe85fbc2c012fdb3d4db793b921c9abcc995d8da1b7a80/zstandard-0.23.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:576856e8594e6649aee06ddbfc738fec6a834f7c85bf7cadd1c53d4a58186ef9", size = 788975 }, + { url = 
"https://files.pythonhosted.org/packages/16/e8/cbf01077550b3e5dc86089035ff8f6fbbb312bc0983757c2d1117ebba242/zstandard-0.23.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:38302b78a850ff82656beaddeb0bb989a0322a8bbb1bf1ab10c17506681d772a", size = 633448 }, + { url = "https://files.pythonhosted.org/packages/06/27/4a1b4c267c29a464a161aeb2589aff212b4db653a1d96bffe3598f3f0d22/zstandard-0.23.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2240ddc86b74966c34554c49d00eaafa8200a18d3a5b6ffbf7da63b11d74ee2", size = 4945269 }, + { url = "https://files.pythonhosted.org/packages/7c/64/d99261cc57afd9ae65b707e38045ed8269fbdae73544fd2e4a4d50d0ed83/zstandard-0.23.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ef230a8fd217a2015bc91b74f6b3b7d6522ba48be29ad4ea0ca3a3775bf7dd5", size = 5306228 }, + { url = "https://files.pythonhosted.org/packages/7a/cf/27b74c6f22541f0263016a0fd6369b1b7818941de639215c84e4e94b2a1c/zstandard-0.23.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:774d45b1fac1461f48698a9d4b5fa19a69d47ece02fa469825b442263f04021f", size = 5336891 }, + { url = "https://files.pythonhosted.org/packages/fa/18/89ac62eac46b69948bf35fcd90d37103f38722968e2981f752d69081ec4d/zstandard-0.23.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f77fa49079891a4aab203d0b1744acc85577ed16d767b52fc089d83faf8d8ed", size = 5436310 }, + { url = "https://files.pythonhosted.org/packages/a8/a8/5ca5328ee568a873f5118d5b5f70d1f36c6387716efe2e369010289a5738/zstandard-0.23.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ac184f87ff521f4840e6ea0b10c0ec90c6b1dcd0bad2f1e4a9a1b4fa177982ea", size = 4859912 }, + { url = "https://files.pythonhosted.org/packages/ea/ca/3781059c95fd0868658b1cf0440edd832b942f84ae60685d0cfdb808bca1/zstandard-0.23.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c363b53e257246a954ebc7c488304b5592b9c53fbe74d03bc1c64dda153fb847", size = 4936946 }, + { url = "https://files.pythonhosted.org/packages/ce/11/41a58986f809532742c2b832c53b74ba0e0a5dae7e8ab4642bf5876f35de/zstandard-0.23.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e7792606d606c8df5277c32ccb58f29b9b8603bf83b48639b7aedf6df4fe8171", size = 5466994 }, + { url = "https://files.pythonhosted.org/packages/83/e3/97d84fe95edd38d7053af05159465d298c8b20cebe9ccb3d26783faa9094/zstandard-0.23.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a0817825b900fcd43ac5d05b8b3079937073d2b1ff9cf89427590718b70dd840", size = 4848681 }, + { url = "https://files.pythonhosted.org/packages/6e/99/cb1e63e931de15c88af26085e3f2d9af9ce53ccafac73b6e48418fd5a6e6/zstandard-0.23.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9da6bc32faac9a293ddfdcb9108d4b20416219461e4ec64dfea8383cac186690", size = 4694239 }, + { url = "https://files.pythonhosted.org/packages/ab/50/b1e703016eebbc6501fc92f34db7b1c68e54e567ef39e6e59cf5fb6f2ec0/zstandard-0.23.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fd7699e8fd9969f455ef2926221e0233f81a2542921471382e77a9e2f2b57f4b", size = 5200149 }, + { url = "https://files.pythonhosted.org/packages/aa/e0/932388630aaba70197c78bdb10cce2c91fae01a7e553b76ce85471aec690/zstandard-0.23.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:d477ed829077cd945b01fc3115edd132c47e6540ddcd96ca169facff28173057", size = 5655392 }, + { url = 
"https://files.pythonhosted.org/packages/02/90/2633473864f67a15526324b007a9f96c96f56d5f32ef2a56cc12f9548723/zstandard-0.23.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa6ce8b52c5987b3e34d5674b0ab529a4602b632ebab0a93b07bfb4dfc8f8a33", size = 5191299 }, + { url = "https://files.pythonhosted.org/packages/b0/4c/315ca5c32da7e2dc3455f3b2caee5c8c2246074a61aac6ec3378a97b7136/zstandard-0.23.0-cp313-cp313-win32.whl", hash = "sha256:a9b07268d0c3ca5c170a385a0ab9fb7fdd9f5fd866be004c4ea39e44edce47dd", size = 430862 }, + { url = "https://files.pythonhosted.org/packages/a2/bf/c6aaba098e2d04781e8f4f7c0ba3c7aa73d00e4c436bcc0cf059a66691d1/zstandard-0.23.0-cp313-cp313-win_amd64.whl", hash = "sha256:f3513916e8c645d0610815c257cbfd3242adfd5c4cfa78be514e5a3ebb42a41b", size = 495578 }, + { url = "https://files.pythonhosted.org/packages/fb/96/4fcafeb7e013a2386d22f974b5b97a0b9a65004ed58c87ae001599bfbd48/zstandard-0.23.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3aa014d55c3af933c1315eb4bb06dd0459661cc0b15cd61077afa6489bec63bb", size = 788697 }, + { url = "https://files.pythonhosted.org/packages/83/ff/a52ce725be69b86a2967ecba0497a8184540cc284c0991125515449e54e2/zstandard-0.23.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7f0804bb3799414af278e9ad51be25edf67f78f916e08afdb983e74161b916", size = 633679 }, + { url = "https://files.pythonhosted.org/packages/34/0f/3dc62db122f6a9c481c335fff6fc9f4e88d8f6e2d47321ee3937328addb4/zstandard-0.23.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb2b1ecfef1e67897d336de3a0e3f52478182d6a47eda86cbd42504c5cbd009a", size = 4940416 }, + { url = "https://files.pythonhosted.org/packages/1d/e5/9fe0dd8c85fdc2f635e6660d07872a5dc4b366db566630161e39f9f804e1/zstandard-0.23.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:837bb6764be6919963ef41235fd56a6486b132ea64afe5fafb4cb279ac44f259", size = 5307693 }, + { url = "https://files.pythonhosted.org/packages/73/bf/fe62c0cd865c171ee8ed5bc83174b5382a2cb729c8d6162edfb99a83158b/zstandard-0.23.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1516c8c37d3a053b01c1c15b182f3b5f5eef19ced9b930b684a73bad121addf4", size = 5341236 }, + { url = "https://files.pythonhosted.org/packages/39/86/4fe79b30c794286110802a6cd44a73b6a314ac8196b9338c0fbd78c2407d/zstandard-0.23.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48ef6a43b1846f6025dde6ed9fee0c24e1149c1c25f7fb0a0585572b2f3adc58", size = 5439101 }, + { url = "https://files.pythonhosted.org/packages/72/ed/cacec235c581ebf8c608c7fb3d4b6b70d1b490d0e5128ea6996f809ecaef/zstandard-0.23.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11e3bf3c924853a2d5835b24f03eeba7fc9b07d8ca499e247e06ff5676461a15", size = 4860320 }, + { url = "https://files.pythonhosted.org/packages/f6/1e/2c589a2930f93946b132fc852c574a19d5edc23fad2b9e566f431050c7ec/zstandard-0.23.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2fb4535137de7e244c230e24f9d1ec194f61721c86ebea04e1581d9d06ea1269", size = 4931933 }, + { url = "https://files.pythonhosted.org/packages/8e/f5/30eadde3686d902b5d4692bb5f286977cbc4adc082145eb3f49d834b2eae/zstandard-0.23.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8c24f21fa2af4bb9f2c492a86fe0c34e6d2c63812a839590edaf177b7398f700", size = 5463878 }, + { url = "https://files.pythonhosted.org/packages/e0/c8/8aed1f0ab9854ef48e5ad4431367fcb23ce73f0304f7b72335a8edc66556/zstandard-0.23.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:a8c86881813a78a6f4508ef9daf9d4995b8ac2d147dcb1a450448941398091c9", size = 4857192 }, + { url = "https://files.pythonhosted.org/packages/a8/c6/55e666cfbcd032b9e271865e8578fec56e5594d4faeac379d371526514f5/zstandard-0.23.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fe3b385d996ee0822fd46528d9f0443b880d4d05528fd26a9119a54ec3f91c69", size = 4696513 }, + { url = "https://files.pythonhosted.org/packages/dc/bd/720b65bea63ec9de0ac7414c33b9baf271c8de8996e5ff324dc93fc90ff1/zstandard-0.23.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:82d17e94d735c99621bf8ebf9995f870a6b3e6d14543b99e201ae046dfe7de70", size = 5204823 }, + { url = "https://files.pythonhosted.org/packages/d8/40/d678db1556e3941d330cd4e95623a63ef235b18547da98fa184cbc028ecf/zstandard-0.23.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c7c517d74bea1a6afd39aa612fa025e6b8011982a0897768a2f7c8ab4ebb78a2", size = 5666490 }, + { url = "https://files.pythonhosted.org/packages/ed/cc/c89329723d7515898a1fc7ef5d251264078548c505719d13e9511800a103/zstandard-0.23.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1fd7e0f1cfb70eb2f95a19b472ee7ad6d9a0a992ec0ae53286870c104ca939e5", size = 5196622 }, + { url = "https://files.pythonhosted.org/packages/78/4c/634289d41e094327a94500dfc919e58841b10ea3a9efdfafbac614797ec2/zstandard-0.23.0-cp39-cp39-win32.whl", hash = "sha256:43da0f0092281bf501f9c5f6f3b4c975a8a0ea82de49ba3f7100e64d422a1274", size = 430620 }, + { url = "https://files.pythonhosted.org/packages/a2/e2/0b0c5a0f4f7699fecd92c1ba6278ef9b01f2b0b0dd46f62bfc6729c05659/zstandard-0.23.0-cp39-cp39-win_amd64.whl", hash = "sha256:f8346bfa098532bc1fb6c7ef06783e969d87a99dd1d2a5a18a892c1d7a643c58", size = 495528 }, +] From a3cc43f60d605739a40680ec962b7204f1908426 Mon Sep 17 00:00:00 2001 From: Shane Harvey Date: Thu, 23 Jan 2025 12:47:19 -0800 Subject: [PATCH 166/182] PYTHON-4999 Resync retryable writes tests (#2073) --- test/retryable_writes/unified/aggregate-out-merge.json | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/test/retryable_writes/unified/aggregate-out-merge.json b/test/retryable_writes/unified/aggregate-out-merge.json index c46bf8c31f..fd25c345ac 100644 --- a/test/retryable_writes/unified/aggregate-out-merge.json +++ b/test/retryable_writes/unified/aggregate-out-merge.json @@ -1,6 +1,6 @@ { "description": "aggregate with $out/$merge does not set txnNumber", - "schemaVersion": "1.3", + "schemaVersion": "1.4", "runOnRequirements": [ { "minServerVersion": "3.6", @@ -45,6 +45,11 @@ "tests": [ { "description": "aggregate with $out does not set txnNumber", + "runOnRequirements": [ + { + "serverless": "forbid" + } + ], "operations": [ { "object": "collection0", From dc182310dabff470dbbfc7da3c09eb6a4e08dfed Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Fri, 24 Jan 2025 07:47:14 -0600 Subject: [PATCH 167/182] PYTHON-5047 Avoid updating the uv lock unintentionally (#2076) --- .evergreen/run-tests.sh | 8 ++++---- .evergreen/scripts/setup-dev-env.sh | 6 +++--- justfile | 2 +- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.evergreen/run-tests.sh b/.evergreen/run-tests.sh index d647955059..fbe310ad1e 100755 --- a/.evergreen/run-tests.sh +++ b/.evergreen/run-tests.sh @@ -37,7 +37,7 @@ export PIP_QUIET=1 # Quiet by default export PIP_PREFER_BINARY=1 # Prefer binary dists by default set +x -PYTHON_IMPL=$(uv run python -c "import platform; print(platform.python_implementation())") +PYTHON_IMPL=$(uv run --frozen python -c "import platform; print(platform.python_implementation())") # Try to source 
local Drivers Secrets if [ -f ./secrets-export.sh ]; then @@ -49,11 +49,11 @@ fi # Start compiling the args we'll pass to uv. # Run in an isolated environment so as not to pollute the base venv. -UV_ARGS=("--isolated --extra test") +UV_ARGS=("--isolated --frozen --extra test") # Ensure C extensions if applicable. if [ -z "${NO_EXT:-}" ] && [ "$PYTHON_IMPL" = "CPython" ]; then - uv run tools/fail_if_no_c.py + uv run --frozen tools/fail_if_no_c.py fi if [ "$AUTH" != "noauth" ]; then @@ -239,7 +239,7 @@ if [ -n "$PERF_TEST" ]; then fi echo "Running $AUTH tests over $SSL with python $(uv python find)" -uv run python -c 'import sys; print(sys.version)' +uv run --frozen python -c 'import sys; print(sys.version)' # Run the tests, and store the results in Evergreen compatible XUnit XML diff --git a/.evergreen/scripts/setup-dev-env.sh b/.evergreen/scripts/setup-dev-env.sh index 3f8d0c4292..ae4b44c626 100755 --- a/.evergreen/scripts/setup-dev-env.sh +++ b/.evergreen/scripts/setup-dev-env.sh @@ -32,11 +32,11 @@ if [ ! -d $BIN_DIR ]; then echo "export UV_PYTHON=$UV_PYTHON" >> $HERE/env.sh fi echo "Using python $UV_PYTHON" -uv sync -uv run --with pip pip install -e . +uv sync --frozen +uv run --frozen --with pip pip install -e . echo "Setting up python environment... done." # Ensure there is a pre-commit hook if there is a git checkout. if [ -d .git ] && [ ! -f .git/hooks/pre-commit ]; then - uv run pre-commit install + uv run --frozen pre-commit install fi diff --git a/justfile b/justfile index 6bcfe0c79c..8a076038a4 100644 --- a/justfile +++ b/justfile @@ -4,7 +4,7 @@ set dotenv-load set dotenv-filename := "./.evergreen/scripts/env.sh" # Commonly used command segments. -uv_run := "uv run --isolated " +uv_run := "uv run --isolated --frozen " typing_run := uv_run + "--group typing --extra aws --extra encryption --extra ocsp --extra snappy --extra test --extra zstd" docs_run := uv_run + "--extra docs" doc_build := "./doc/_build" From a3208df5c94620228b015ac79cd1548582c65ab1 Mon Sep 17 00:00:00 2001 From: Shane Harvey Date: Fri, 24 Jan 2025 14:30:07 -0800 Subject: [PATCH 168/182] PYTHON-5059 Update default maxMessageSizeBytes and maxWriteBatchSize (#2078) --- pymongo/common.py | 4 ++-- pymongo/hello.py | 2 +- test/test_server_description.py | 13 ++++++++----- 3 files changed, 11 insertions(+), 8 deletions(-) diff --git a/pymongo/common.py b/pymongo/common.py index 5661de011c..b442da6a3e 100644 --- a/pymongo/common.py +++ b/pymongo/common.py @@ -60,10 +60,10 @@ # Defaults until we connect to a server and get updated limits. MAX_BSON_SIZE = 16 * (1024**2) -MAX_MESSAGE_SIZE: int = 2 * MAX_BSON_SIZE +MAX_MESSAGE_SIZE = 48 * 1000 * 1000 MIN_WIRE_VERSION = 0 MAX_WIRE_VERSION = 0 -MAX_WRITE_BATCH_SIZE = 1000 +MAX_WRITE_BATCH_SIZE = 100000 # What this version of PyMongo supports. 
MIN_SUPPORTED_SERVER_VERSION = "4.0" diff --git a/pymongo/hello.py b/pymongo/hello.py index 62bb799805..c30b825e19 100644 --- a/pymongo/hello.py +++ b/pymongo/hello.py @@ -133,7 +133,7 @@ def max_bson_size(self) -> int: @property def max_message_size(self) -> int: - return self._doc.get("maxMessageSizeBytes", 2 * self.max_bson_size) + return self._doc.get("maxMessageSizeBytes", common.MAX_MESSAGE_SIZE) @property def max_write_batch_size(self) -> int: diff --git a/test/test_server_description.py b/test/test_server_description.py index fe7a5f7119..e8c0098cb6 100644 --- a/test/test_server_description.py +++ b/test/test_server_description.py @@ -23,6 +23,7 @@ from bson.int64 import Int64 from bson.objectid import ObjectId +from pymongo import common from pymongo.hello import Hello, HelloCompat from pymongo.server_description import ServerDescription from pymongo.server_type import SERVER_TYPE @@ -132,11 +133,13 @@ def test_fields(self): self.assertEqual(4, s.min_wire_version) self.assertEqual(25, s.max_wire_version) - def test_default_max_message_size(self): - s = parse_hello_response({"ok": 1, HelloCompat.LEGACY_CMD: True, "maxBsonObjectSize": 2}) - - # Twice max_bson_size. - self.assertEqual(4, s.max_message_size) + def test_defaults(self): + s = parse_hello_response({"ok": 1, HelloCompat.LEGACY_CMD: True}) + self.assertEqual(common.MAX_BSON_SIZE, s.max_bson_size) + self.assertEqual(common.MAX_MESSAGE_SIZE, s.max_message_size) + self.assertEqual(common.MIN_WIRE_VERSION, s.min_wire_version) + self.assertEqual(common.MAX_WIRE_VERSION, s.max_wire_version) + self.assertEqual(common.MAX_WRITE_BATCH_SIZE, s.max_write_batch_size) def test_standalone(self): s = parse_hello_response({"ok": 1, HelloCompat.LEGACY_CMD: True}) From 9082a4be23622458ee350c3171bc754cdf1db89a Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Fri, 24 Jan 2025 17:14:20 -0600 Subject: [PATCH 169/182] PYTHON-5058 Build linux aarch64 wheel using native runner and omit ppc64le and s390x wheels (#2077) --- .github/workflows/dist.yml | 7 ++++--- doc/changelog.rst | 2 ++ 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/.github/workflows/dist.yml b/.github/workflows/dist.yml index a4c5a8279b..5100c70d43 100644 --- a/.github/workflows/dist.yml +++ b/.github/workflows/dist.yml @@ -35,9 +35,10 @@ jobs: # https://github.com/github/feedback/discussions/7835#discussioncomment-1769026 buildplat: - [ubuntu-20.04, "manylinux_x86_64", "cp3*-manylinux_x86_64"] - - [ubuntu-20.04, "manylinux_aarch64", "cp3*-manylinux_aarch64"] - - [ubuntu-20.04, "manylinux_ppc64le", "cp3*-manylinux_ppc64le"] - - [ubuntu-20.04, "manylinux_s390x", "cp3*-manylinux_s390x"] + - [ubuntu-24.04-arm, "manylinux_aarch64", "cp3*-manylinux_aarch64"] + # Disabled pending PYTHON-5058 + # - [ubuntu-24.04, "manylinux_ppc64le", "cp3*-manylinux_ppc64le"] + # - [ubuntu-24.04, "manylinux_s390x", "cp3*-manylinux_s390x"] - [ubuntu-20.04, "manylinux_i686", "cp3*-manylinux_i686"] - [windows-2019, "win_amd6", "cp3*-win_amd64"] - [windows-2019, "win32", "cp3*-win32"] diff --git a/doc/changelog.rst b/doc/changelog.rst index 4942d85de8..1f3efb8ad0 100644 --- a/doc/changelog.rst +++ b/doc/changelog.rst @@ -11,6 +11,7 @@ Changes in Version 4.11.0 (YYYY/MM/DD) A future minor release of PyMongo will raise the minimum supported MongoDB Server version from 4.0 to 4.2. This is in accordance with [MongoDB Software Lifecycle Schedules](https://www.mongodb.com/legal/support-policy/lifecycles). **Support for MongoDB Server 4.0 will be dropped in a future release!** +.. 
warning:: This version does not include wheels for ``ppc64le`` or ``s390x`` architectures, see `PYTHON-5058`_ for more information. PyMongo 4.11 brings a number of changes including: @@ -49,6 +50,7 @@ in this release. .. _PyMongo 4.11 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=40784 .. _PYTHON-5027: https://jira.mongodb.org/browse/PYTHON-5027 .. _PYTHON-5024: https://jira.mongodb.org/browse/PYTHON-5024 +.. _PYTHON-5058: https://jira.mongodb.org/browse/PYTHON-5058 Changes in Version 4.10.1 (2024/10/01) -------------------------------------- From 2225ccadce0033f2ed16a99e44dc98085263da59 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Mon, 27 Jan 2025 08:59:29 -0600 Subject: [PATCH 170/182] PYTHON-5062 Add GitHub Actions CodeQL scanning (#2079) --- .github/workflows/codeql.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index e620cb1801..bb2418cf89 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -34,6 +34,8 @@ jobs: build-mode: manual - language: python build-mode: none + - language: actions + build-mode: none steps: - name: Checkout repository uses: actions/checkout@v4 From 848ab4f7db6e2a151b98765547300198e489172a Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Mon, 27 Jan 2025 09:24:48 -0600 Subject: [PATCH 171/182] PYTHON-5047 Improve testing of publish workflows (#2080) --- .github/workflows/release-python.yml | 23 +++++++++++++++++------ 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/.github/workflows/release-python.yml b/.github/workflows/release-python.yml index ee4ea32f82..6548a7d1ad 100644 --- a/.github/workflows/release-python.yml +++ b/.github/workflows/release-python.yml @@ -13,6 +13,8 @@ on: description: "Dry Run?" 
default: false type: boolean + schedule: + - cron: '30 5 * * *' env: # Changes per repo @@ -20,6 +22,10 @@ env: # Changes per branch SILK_ASSET_GROUP: mongodb-python-driver EVERGREEN_PROJECT: mongo-python-driver + # Constant + DRY_RUN: ${{ inputs.dry_run || 'true' }} + FOLLOWING_VERSION: ${{ inputs.following_version || '' }} + VERSION: ${{ inputs.version || '10.10.10.10' }} defaults: run: @@ -48,8 +54,8 @@ jobs: - uses: mongodb-labs/drivers-github-tools/python/pre-publish@v2 id: pre-publish with: - version: ${{ inputs.version }} - dry_run: ${{ inputs.dry_run }} + version: ${{ env.VERSION }} + dry_run: ${{ env.DRY_RUN }} build-dist: needs: [pre-publish] @@ -78,8 +84,13 @@ jobs: with: name: all-dist-${{ github.run_id }} path: dist/ + - name: Publish package distributions to TestPyPI + uses: pypa/gh-action-pypi-publish@release/v1 + with: + repository-url: https://test.pypi.org/legacy/ + skip-existing: true - name: Publish package distributions to PyPI - if: startsWith(inputs.dry_run, 'false') + if: startsWith(env.DRY_RUN, 'false') uses: pypa/gh-action-pypi-publish@release/v1 post-publish: @@ -104,10 +115,10 @@ jobs: artifactory_username: ${{ vars.ARTIFACTORY_USERNAME }} - uses: mongodb-labs/drivers-github-tools/python/post-publish@v2 with: - version: ${{ inputs.version }} - following_version: ${{ inputs.following_version }} + version: ${{ env.VERSION }} + following_version: ${{ env.FOLLOWING_VERSION }} product_name: ${{ env.PRODUCT_NAME }} silk_asset_group: ${{ env.SILK_ASSET_GROUP }} evergreen_project: ${{ env.EVERGREEN_PROJECT }} token: ${{ github.token }} - dry_run: ${{ inputs.dry_run }} + dry_run: ${{ env.DRY_RUN }} From 4567f8875eb8ea8c4a1bd2e0bb13e41e4d3da7b2 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Mon, 27 Jan 2025 13:43:11 -0600 Subject: [PATCH 172/182] PYTHON-5047 Fix handling of attestation on testpypi workflows (#2081) --- .github/workflows/release-python.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/release-python.yml b/.github/workflows/release-python.yml index 6548a7d1ad..88cffcda53 100644 --- a/.github/workflows/release-python.yml +++ b/.github/workflows/release-python.yml @@ -89,6 +89,7 @@ jobs: with: repository-url: https://test.pypi.org/legacy/ skip-existing: true + attestations: ${{ env.DRY_RUN }} - name: Publish package distributions to PyPI if: startsWith(env.DRY_RUN, 'false') uses: pypa/gh-action-pypi-publish@release/v1 From dc2993835eaeacb2dcb08969afafcbddf08f41fb Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Tue, 28 Jan 2025 09:27:09 -0600 Subject: [PATCH 173/182] PYTHON-5047 Fix dry run logic in releases (#2083) --- .github/workflows/release-python.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release-python.yml b/.github/workflows/release-python.yml index 88cffcda53..bcf37d1a22 100644 --- a/.github/workflows/release-python.yml +++ b/.github/workflows/release-python.yml @@ -23,7 +23,7 @@ env: SILK_ASSET_GROUP: mongodb-python-driver EVERGREEN_PROJECT: mongo-python-driver # Constant - DRY_RUN: ${{ inputs.dry_run || 'true' }} + DRY_RUN: ${{ inputs.dry_run == 'true' }} FOLLOWING_VERSION: ${{ inputs.following_version || '' }} VERSION: ${{ inputs.version || '10.10.10.10' }} From cae161ecddbc9c380c21b675da8c042603450c63 Mon Sep 17 00:00:00 2001 From: "mongodb-dbx-release-bot[bot]" <167856002+mongodb-dbx-release-bot[bot]@users.noreply.github.com> Date: Tue, 28 Jan 2025 15:30:50 +0000 Subject: [PATCH 174/182] BUMP 4.11 Signed-off-by: mongodb-dbx-release-bot[bot] 
<167856002+mongodb-dbx-release-bot[bot]@users.noreply.github.com> --- pymongo/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pymongo/_version.py b/pymongo/_version.py index 3de24a8e14..22972c5ce4 100644 --- a/pymongo/_version.py +++ b/pymongo/_version.py @@ -18,7 +18,7 @@ import re from typing import List, Tuple, Union -__version__ = "4.11.0.dev0" +__version__ = "4.11" def get_version_tuple(version: str) -> Tuple[Union[int, str], ...]: From ed21a181721eb3644f3106f82be6a5f7c611d2d8 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Wed, 29 Jan 2025 13:27:07 -0600 Subject: [PATCH 175/182] PYTHON-5047 Fix dry run logic in releases again (#2092) (cherry picked from commit b4e32a1d8388fe5bf731c0c866b8bb96bbf19870) --- .github/workflows/release-python.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release-python.yml b/.github/workflows/release-python.yml index bcf37d1a22..0801d12f59 100644 --- a/.github/workflows/release-python.yml +++ b/.github/workflows/release-python.yml @@ -23,7 +23,7 @@ env: SILK_ASSET_GROUP: mongodb-python-driver EVERGREEN_PROJECT: mongo-python-driver # Constant - DRY_RUN: ${{ inputs.dry_run == 'true' }} + DRY_RUN: ${{ github.event_name == 'workflow_dispatch' && inputs.dry_run || 'true' }} FOLLOWING_VERSION: ${{ inputs.following_version || '' }} VERSION: ${{ inputs.version || '10.10.10.10' }} From d0301307a0f6b8a771c5b927e583630104dc59ab Mon Sep 17 00:00:00 2001 From: Noah Stapp Date: Wed, 29 Jan 2025 15:35:00 -0500 Subject: [PATCH 176/182] PYTHON-5112 - Fix just install (#2095) (cherry picked from commit 1784e2c4b9c7e5efbed1796e81e37fa49f8845f0) --- .evergreen/scripts/setup-dev-env.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.evergreen/scripts/setup-dev-env.sh b/.evergreen/scripts/setup-dev-env.sh index ae4b44c626..b56897961e 100755 --- a/.evergreen/scripts/setup-dev-env.sh +++ b/.evergreen/scripts/setup-dev-env.sh @@ -30,8 +30,8 @@ if [ ! -d $BIN_DIR ]; then fi export UV_PYTHON=${PYTHON_BINARY} echo "export UV_PYTHON=$UV_PYTHON" >> $HERE/env.sh + echo "Using python $UV_PYTHON" fi -echo "Using python $UV_PYTHON" uv sync --frozen uv run --frozen --with pip pip install -e . echo "Setting up python environment... done." From f1a8af160d21d0fe3c88c782e5219fd621344395 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Wed, 29 Jan 2025 17:46:36 -0600 Subject: [PATCH 177/182] PYTHON-5047 Fix dry run logic in releases yet again (#2098) (cherry picked from commit 34ae214e33e922e3478388517a2b37aa4fd64dba) --- .github/workflows/release-python.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/release-python.yml b/.github/workflows/release-python.yml index 0801d12f59..45157bfc2b 100644 --- a/.github/workflows/release-python.yml +++ b/.github/workflows/release-python.yml @@ -23,7 +23,9 @@ env: SILK_ASSET_GROUP: mongodb-python-driver EVERGREEN_PROJECT: mongo-python-driver # Constant - DRY_RUN: ${{ github.event_name == 'workflow_dispatch' && inputs.dry_run || 'true' }} + # inputs will be empty on a scheduled run. so, we only set dry_run + # to 'false' when the input is set to 'false'. + DRY_RUN: ${{ ! 
contains(inputs.dry_run, 'false') }} FOLLOWING_VERSION: ${{ inputs.following_version || '' }} VERSION: ${{ inputs.version || '10.10.10.10' }} From 3c9a248bba442cbdd0999807d6582cb1aef11df1 Mon Sep 17 00:00:00 2001 From: Jib Date: Fri, 31 Jan 2025 15:16:17 -0500 Subject: [PATCH 178/182] Update ReadTheDocs to include django-mongodb-backend (#2084) (cherry picked from commit 3b5788906ddeb326c0407f7b490aadde0f88c2ee) --- doc/tools.rst | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/doc/tools.rst b/doc/tools.rst index 6dd0df8a4d..7ec3ddb443 100644 --- a/doc/tools.rst +++ b/doc/tools.rst @@ -67,6 +67,14 @@ uMongo mongomock. The source `is available on GitHub `_ +Django MongoDB Backend + `Django MongoDB Backend `_ is a + database backend library specifically made for Django. The integration takes + advantage of MongoDB's unique document model capabilities, which align + naturally with Django's philosophy of simplified data modeling and + reduced development complexity. The source is available + `on GitHub `_. + No longer maintained """""""""""""""""""" From e15b820f719571ad7182e762ccd2ee183daa491a Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Thu, 6 Feb 2025 13:31:55 -0600 Subject: [PATCH 179/182] PYTHON-5058 Restore alternate architecture builds (#2133) (cherry picked from commit 041edbecba28b793e194479688a750e23c9317c2) --- .github/workflows/dist.yml | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/.github/workflows/dist.yml b/.github/workflows/dist.yml index 5100c70d43..3dee8f581c 100644 --- a/.github/workflows/dist.yml +++ b/.github/workflows/dist.yml @@ -35,10 +35,9 @@ jobs: # https://github.com/github/feedback/discussions/7835#discussioncomment-1769026 buildplat: - [ubuntu-20.04, "manylinux_x86_64", "cp3*-manylinux_x86_64"] - - [ubuntu-24.04-arm, "manylinux_aarch64", "cp3*-manylinux_aarch64"] - # Disabled pending PYTHON-5058 - # - [ubuntu-24.04, "manylinux_ppc64le", "cp3*-manylinux_ppc64le"] - # - [ubuntu-24.04, "manylinux_s390x", "cp3*-manylinux_s390x"] + - [ubuntu-20.04, "manylinux_aarch64", "cp3*-manylinux_aarch64"] + - [ubuntu-20.04, "manylinux_ppc64le", "cp3*-manylinux_ppc64le"] + - [ubuntu-20.04, "manylinux_s390x", "cp3*-manylinux_s390x"] - [ubuntu-20.04, "manylinux_i686", "cp3*-manylinux_i686"] - [windows-2019, "win_amd6", "cp3*-win_amd64"] - [windows-2019, "win32", "cp3*-win32"] @@ -63,6 +62,10 @@ jobs: if: runner.os == 'Linux' uses: docker/setup-qemu-action@v3 with: + # setup-qemu-action by default uses `tonistiigi/binfmt:latest` image, + # which is out of date. This causes seg faults during build. + # Here we manually fix the version. 
+ image: tonistiigi/binfmt:qemu-v8.1.5 platforms: all - name: Install cibuildwheel From c83f34c826d23c15905d77337d2356d297bc404e Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Thu, 6 Feb 2025 13:59:26 -0600 Subject: [PATCH 180/182] PYTHON-5047 Do not run nightly release check on forks (#2134) (cherry picked from commit a641337b5c98087884c0197628c772ced0f59965) --- .github/workflows/release-python.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/release-python.yml b/.github/workflows/release-python.yml index 45157bfc2b..d8c900e77b 100644 --- a/.github/workflows/release-python.yml +++ b/.github/workflows/release-python.yml @@ -37,6 +37,7 @@ jobs: pre-publish: environment: release runs-on: ubuntu-latest + if: github.repository_owner == 'mongodb' || github.event_name == 'workflow_dispatch' permissions: id-token: write contents: write From 4a6aae8f4364d87a0ea2b10c7745a0000ea83513 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Mon, 10 Feb 2025 13:41:26 -0600 Subject: [PATCH 181/182] PYTHON-5125 Prep for 4.11.1 release (#2137) --- .github/workflows/release-python.yml | 4 ++-- doc/changelog.rst | 7 ++++++- sbom.json | 21 +++++++++++---------- 3 files changed, 19 insertions(+), 13 deletions(-) diff --git a/.github/workflows/release-python.yml b/.github/workflows/release-python.yml index d8c900e77b..a6810854ff 100644 --- a/.github/workflows/release-python.yml +++ b/.github/workflows/release-python.yml @@ -20,8 +20,8 @@ env: # Changes per repo PRODUCT_NAME: PyMongo # Changes per branch - SILK_ASSET_GROUP: mongodb-python-driver - EVERGREEN_PROJECT: mongo-python-driver + SILK_ASSET_GROUP: mongodb-python-driver-4.11 + EVERGREEN_PROJECT: mongo-python-driver-release # Constant # inputs will be empty on a scheduled run. so, we only set dry_run # to 'false' when the input is set to 'false'. diff --git a/doc/changelog.rst b/doc/changelog.rst index 1f3efb8ad0..a1bda74944 100644 --- a/doc/changelog.rst +++ b/doc/changelog.rst @@ -1,7 +1,12 @@ Changelog ========= -Changes in Version 4.11.0 (YYYY/MM/DD) +Changes in Version 4.11.1 (2025/MM/DD) +-------------------------------------- + +- Fixed support for prebuilt ``ppc64le`` and ``s390x`` wheels. + +Changes in Version 4.11.0 (2025/01/28) -------------------------------------- .. warning:: PyMongo 4.11 drops support for Python 3.8 and PyPy 3.9: Python 3.9+ or PyPy 3.10+ is now required. 
diff --git a/sbom.json b/sbom.json index 56e27f5361..560e282a64 100644 --- a/sbom.json +++ b/sbom.json @@ -1,11 +1,12 @@ { - "metadata": { - "timestamp": "2024-05-02T17:36:12.698229+00:00" - }, - "components": [], - "serialNumber": "urn:uuid:9876a8a6-060e-486f-b128-910aecf0fe7b", - "version": 1, - "$schema": "http://cyclonedx.org/schema/bom-1.5.schema.json", - "bomFormat": "CycloneDX", - "specVersion": "1.5" - } \ No newline at end of file + "metadata": { + "timestamp": "2025-02-06T23:38:31.503894+00:00" + }, + "components": [], + "serialNumber": "urn:uuid:04738005-86f1-4d21-866f-ce4560f03f00", + "version": 1, + "$schema": "http://cyclonedx.org/schema/bom-1.5.schema.json", + "bomFormat": "CycloneDX", + "specVersion": "1.5", + "vulnerabilities": [] +} From d175edff9f1160e9259e7a8872c168a4fe445399 Mon Sep 17 00:00:00 2001 From: "mongodb-dbx-release-bot[bot]" <167856002+mongodb-dbx-release-bot[bot]@users.noreply.github.com> Date: Mon, 10 Feb 2025 19:42:27 +0000 Subject: [PATCH 182/182] BUMP 4.11.1 Signed-off-by: mongodb-dbx-release-bot[bot] <167856002+mongodb-dbx-release-bot[bot]@users.noreply.github.com> --- pymongo/_version.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pymongo/_version.py b/pymongo/_version.py index 22972c5ce4..7b72ca9aa6 100644 --- a/pymongo/_version.py +++ b/pymongo/_version.py @@ -18,7 +18,7 @@ import re from typing import List, Tuple, Union -__version__ = "4.11" +__version__ = "4.11.1" def get_version_tuple(version: str) -> Tuple[Union[int, str], ...]:
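
A note on PYTHON-5059 (#2078) above: when a hello response omits the size limits, the driver now falls back to the shared constants in pymongo.common instead of deriving the message size from maxBsonObjectSize. A minimal sketch of the new behaviour, echoing the defaults exercised by the updated test_server_description.py (Hello is an internal class, so the exact constructor may differ between releases):

    from pymongo import common
    from pymongo.hello import Hello, HelloCompat

    # A hello response that omits maxMessageSizeBytes and maxWriteBatchSize.
    hello = Hello({"ok": 1, HelloCompat.LEGACY_CMD: True})

    # The defaults now come straight from pymongo.common:
    # MAX_MESSAGE_SIZE is 48 * 1000 * 1000 and MAX_WRITE_BATCH_SIZE is 100000.
    assert hello.max_message_size == common.MAX_MESSAGE_SIZE
    assert hello.max_write_batch_size == common.MAX_WRITE_BATCH_SIZE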
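
Similarly, the DRY_RUN expression that release-python.yml settles on above, ! contains(inputs.dry_run, 'false'), is easy to misread. A rough model of the intended truth table, written in Python only for clarity (the real evaluation happens in GitHub Actions expression syntax, not Python):

    def dry_run(dry_run_input: str) -> bool:
        # Model of `! contains(inputs.dry_run, 'false')`: on a scheduled run
        # the input is empty, so the workflow defaults to a dry run; only an
        # explicit 'false' from workflow_dispatch performs a real release.
        return "false" not in dry_run_input

    assert dry_run("") is True        # scheduled nightly run -> dry run
    assert dry_run("true") is True    # manual run with dry_run=true -> dry run
    assert dry_run("false") is False  # manual run with dry_run=false -> real release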