diff --git a/.evergreen/build-manylinux-internal.sh b/.evergreen/build-manylinux-internal.sh index 9943a983c4..267e647ffd 100755 --- a/.evergreen/build-manylinux-internal.sh +++ b/.evergreen/build-manylinux-internal.sh @@ -11,7 +11,7 @@ mv dist/* validdist || true # Compile wheels for PYTHON in /opt/python/*/bin/python; do - if [[ ! $PYTHON =~ (cp37|cp38|cp39|cp310|cp311|cp312) ]]; then + if [[ ! $PYTHON =~ (cp38|cp39|cp310|cp311|cp312) ]]; then continue fi # https://github.com/pypa/manylinux/issues/49 diff --git a/.evergreen/build-manylinux.sh b/.evergreen/build-manylinux.sh index 11cf1dd231..19f2b7f4aa 100755 --- a/.evergreen/build-manylinux.sh +++ b/.evergreen/build-manylinux.sh @@ -39,7 +39,6 @@ ls dist # Check for any unexpected files. unexpected=$(find dist \! \( -iname dist -or \ - -iname '*cp37*' -or \ -iname '*cp38*' -or \ -iname '*cp39*' -or \ -iname '*cp310*' -or \ diff --git a/.evergreen/build-windows.sh b/.evergreen/build-windows.sh index a3ed0c2f19..d30382fcee 100755 --- a/.evergreen/build-windows.sh +++ b/.evergreen/build-windows.sh @@ -8,7 +8,7 @@ rm -rf validdist mkdir -p validdist mv dist/* validdist || true -for VERSION in 37 38 39 310 311 312; do +for VERSION in 38 39 310 311 312; do _pythons=("C:/Python/Python${VERSION}/python.exe" \ "C:/Python/32/Python${VERSION}/python.exe") for PYTHON in "${_pythons[@]}"; do diff --git a/.evergreen/combine-coverage.sh b/.evergreen/combine-coverage.sh index f4aa3c29af..7db4a6cbc2 100644 --- a/.evergreen/combine-coverage.sh +++ b/.evergreen/combine-coverage.sh @@ -13,9 +13,9 @@ if [ -z "$PYTHON_BINARY" ]; then fi createvirtualenv "$PYTHON_BINARY" covenv -# coverage 7.3 dropped support for Python 3.7, keep in sync with run-tests.sh +# Keep in sync with run-tests.sh # coverage >=5 is needed for relative_files=true. -pip install -q "coverage>=5,<7.3" +pip install -q "coverage>=5,<=7.5" pip list ls -la coverage/ diff --git a/.evergreen/config.yml b/.evergreen/config.yml index a84b842148..12cce5bf77 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -58,14 +58,12 @@ functions: export MONGO_ORCHESTRATION_HOME="$DRIVERS_TOOLS/.evergreen/orchestration" export MONGODB_BINARIES="$DRIVERS_TOOLS/mongodb/bin" - export UPLOAD_BUCKET="${project}" cat < expansion.yml CURRENT_VERSION: "$CURRENT_VERSION" DRIVERS_TOOLS: "$DRIVERS_TOOLS" MONGO_ORCHESTRATION_HOME: "$MONGO_ORCHESTRATION_HOME" MONGODB_BINARIES: "$MONGODB_BINARIES" - UPLOAD_BUCKET: "$UPLOAD_BUCKET" PROJECT_DIRECTORY: "$PROJECT_DIRECTORY" PREPARE_SHELL: | set -o errexit @@ -73,7 +71,6 @@ functions: export DRIVERS_TOOLS="$DRIVERS_TOOLS" export MONGO_ORCHESTRATION_HOME="$MONGO_ORCHESTRATION_HOME" export MONGODB_BINARIES="$MONGODB_BINARIES" - export UPLOAD_BUCKET="$UPLOAD_BUCKET" export PROJECT_DIRECTORY="$PROJECT_DIRECTORY" export TMPDIR="$MONGO_ORCHESTRATION_HOME/db" @@ -103,30 +100,35 @@ functions: echo "{ \"releases\": { \"default\": \"$MONGODB_BINARIES\" }}" > $MONGO_ORCHESTRATION_HOME/orchestration.config "upload coverage" : + - command: ec2.assume_role + params: + role_arn: ${assume_role_arn} - command: s3.put params: - aws_key: ${aws_key} - aws_secret: ${aws_secret} + aws_key: ${AWS_ACCESS_KEY_ID} + aws_secret: ${AWS_SECRET_ACCESS_KEY} + aws_session_token: ${AWS_SESSION_TOKEN} local_file: src/.coverage optional: true # Upload the coverage report for all tasks in a single build to the same directory. 
- remote_file: ${UPLOAD_BUCKET}/coverage/${revision}/${version_id}/coverage/coverage.${build_variant}.${task_name} - bucket: mciuploads + remote_file: coverage/${revision}/${version_id}/coverage/coverage.${build_variant}.${task_name} + bucket: ${bucket_name} permissions: public-read content_type: text/html display_name: "Raw Coverage Report" "download and merge coverage" : + - command: ec2.assume_role + params: + role_arn: ${assume_role_arn} - command: shell.exec params: - silent: true working_dir: "src" + silent: true + include_expansions_in_env: ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN"] script: | - export AWS_ACCESS_KEY_ID=${aws_key} - export AWS_SECRET_ACCESS_KEY=${aws_secret} - # Download all the task coverage files. - aws s3 cp --recursive s3://mciuploads/${UPLOAD_BUCKET}/coverage/${revision}/${version_id}/coverage/ coverage/ + aws s3 cp --recursive s3://${bucket_name}/coverage/${revision}/${version_id}/coverage/ coverage/ - command: shell.exec params: working_dir: "src" @@ -136,20 +138,20 @@ functions: # Upload the resulting html coverage report. - command: shell.exec params: - silent: true working_dir: "src" + silent: true + include_expansions_in_env: ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN"] script: | - export AWS_ACCESS_KEY_ID=${aws_key} - export AWS_SECRET_ACCESS_KEY=${aws_secret} - aws s3 cp htmlcov/ s3://mciuploads/${UPLOAD_BUCKET}/coverage/${revision}/${version_id}/htmlcov/ --recursive --acl public-read --region us-east-1 + aws s3 cp htmlcov/ s3://${bucket_name}/coverage/${revision}/${version_id}/htmlcov/ --recursive --acl public-read --region us-east-1 # Attach the index.html with s3.put so it shows up in the Evergreen UI. - command: s3.put params: - aws_key: ${aws_key} - aws_secret: ${aws_secret} + aws_key: ${AWS_ACCESS_KEY_ID} + aws_secret: ${AWS_SECRET_ACCESS_KEY} + aws_session_token: ${AWS_SESSION_TOKEN} local_file: src/htmlcov/index.html - remote_file: ${UPLOAD_BUCKET}/coverage/${revision}/${version_id}/htmlcov/index.html - bucket: mciuploads + remote_file: coverage/${revision}/${version_id}/htmlcov/index.html + bucket: ${bucket_name} permissions: public-read content_type: text/html display_name: "Coverage Report HTML" @@ -172,34 +174,40 @@ functions: include: - "./**.core" - "./**.mdmp" # Windows: minidumps + - command: ec2.assume_role + params: + role_arn: ${assume_role_arn} - command: s3.put params: - aws_key: ${aws_key} - aws_secret: ${aws_secret} + aws_key: ${AWS_ACCESS_KEY_ID} + aws_secret: ${AWS_SECRET_ACCESS_KEY} + aws_session_token: ${AWS_SESSION_TOKEN} local_file: mongo-coredumps.tgz - remote_file: ${UPLOAD_BUCKET}/${build_variant}/${revision}/${version_id}/${build_id}/coredumps/${task_id}-${execution}-mongodb-coredumps.tar.gz - bucket: mciuploads + remote_file: ${build_variant}/${revision}/${version_id}/${build_id}/coredumps/${task_id}-${execution}-mongodb-coredumps.tar.gz + bucket: ${bucket_name} permissions: public-read content_type: ${content_type|application/gzip} display_name: Core Dumps - Execution optional: true - command: s3.put params: - aws_key: ${aws_key} - aws_secret: ${aws_secret} + aws_key: ${AWS_ACCESS_KEY_ID} + aws_secret: ${AWS_SECRET_ACCESS_KEY} + aws_session_token: ${AWS_SESSION_TOKEN} local_file: mongodb-logs.tar.gz - remote_file: ${UPLOAD_BUCKET}/${build_variant}/${revision}/${version_id}/${build_id}/logs/${task_id}-${execution}-mongodb-logs.tar.gz - bucket: mciuploads + remote_file: ${build_variant}/${revision}/${version_id}/${build_id}/logs/${task_id}-${execution}-mongodb-logs.tar.gz + 
bucket: ${bucket_name} permissions: public-read content_type: ${content_type|application/x-gzip} display_name: "mongodb-logs.tar.gz" - command: s3.put params: - aws_key: ${aws_key} - aws_secret: ${aws_secret} + aws_key: ${AWS_ACCESS_KEY_ID} + aws_secret: ${AWS_SECRET_ACCESS_KEY} + aws_session_token: ${AWS_SESSION_TOKEN} local_file: drivers-tools/.evergreen/orchestration/server.log - remote_file: ${UPLOAD_BUCKET}/${build_variant}/${revision}/${version_id}/${build_id}/logs/${task_id}-${execution}-orchestration.log - bucket: mciuploads + remote_file: ${build_variant}/${revision}/${version_id}/${build_id}/logs/${task_id}-${execution}-orchestration.log + bucket: ${bucket_name} permissions: public-read content_type: ${content_type|text/plain} display_name: "orchestration.log" @@ -211,13 +219,17 @@ functions: source_dir: ${PROJECT_DIRECTORY}/ include: - "./**" + - command: ec2.assume_role + params: + role_arn: ${assume_role_arn} - command: s3.put params: - aws_key: ${aws_key} - aws_secret: ${aws_secret} + aws_key: ${AWS_ACCESS_KEY_ID} + aws_secret: ${AWS_SECRET_ACCESS_KEY} + aws_session_token: ${AWS_SESSION_TOKEN} local_file: working-dir.tar.gz - remote_file: ${UPLOAD_BUCKET}/${build_variant}/${revision}/${version_id}/${build_id}/artifacts/${task_id}-${execution}-working-dir.tar.gz - bucket: mciuploads + remote_file: ${build_variant}/${revision}/${version_id}/${build_id}/artifacts/${task_id}-${execution}-working-dir.tar.gz + bucket: ${bucket_name} permissions: public-read content_type: ${content_type|application/x-gzip} display_name: "working-dir.tar.gz" @@ -232,11 +244,12 @@ functions: - "*.lock" - command: s3.put params: - aws_key: ${aws_key} - aws_secret: ${aws_secret} + aws_key: ${AWS_ACCESS_KEY_ID} + aws_secret: ${AWS_SECRET_ACCESS_KEY} + aws_session_token: ${AWS_SESSION_TOKEN} local_file: drivers-dir.tar.gz - remote_file: ${UPLOAD_BUCKET}/${build_variant}/${revision}/${version_id}/${build_id}/artifacts/${task_id}-${execution}-drivers-dir.tar.gz - bucket: mciuploads + remote_file: ${build_variant}/${revision}/${version_id}/${build_id}/artifacts/${task_id}-${execution}-drivers-dir.tar.gz + bucket: ${bucket_name} permissions: public-read content_type: ${content_type|application/x-gzip} display_name: "drivers-dir.tar.gz" @@ -791,27 +804,32 @@ functions: source_dir: "src/dist" include: - "*" + - command: ec2.assume_role + params: + role_arn: ${assume_role_arn} - command: s3.put params: - aws_key: ${aws_key} - aws_secret: ${aws_secret} + aws_key: ${AWS_ACCESS_KEY_ID} + aws_secret: ${AWS_SECRET_ACCESS_KEY} + aws_session_token: ${AWS_SESSION_TOKEN} local_file: release-files.tgz - remote_file: ${UPLOAD_BUCKET}/release/${revision}/${task_id}-${execution}-release-files.tar.gz - bucket: mciuploads + remote_file: release/${revision}/${task_id}-${execution}-release-files.tar.gz + bucket: ${bucket_name} permissions: public-read content_type: ${content_type|application/gzip} display_name: Release files "download and merge releases": + - command: ec2.assume_role + params: + role_arn: ${assume_role_arn} - command: shell.exec params: silent: true + include_expansions_in_env: ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN"] script: | - export AWS_ACCESS_KEY_ID=${aws_key} - export AWS_SECRET_ACCESS_KEY=${aws_secret} - # Download all the task coverage files. 
- aws s3 cp --recursive s3://mciuploads/${UPLOAD_BUCKET}/release/${revision}/ release/ + aws s3 cp --recursive s3://${bucket_name}/release/${revision}/ release/ - command: shell.exec params: shell: "bash" @@ -834,8 +852,8 @@ functions: done # Build source distribution. cd src/ - /opt/python/3.7/bin/python3 -m pip install build - /opt/python/3.7/bin/python3 -m build --sdist . + /opt/python/3.8/bin/python3 -m pip install build + /opt/python/3.8/bin/python3 -m build --sdist . cp dist/* ../releases - command: archive.targz_pack params: @@ -845,11 +863,12 @@ functions: - "*" - command: s3.put params: - aws_key: ${aws_key} - aws_secret: ${aws_secret} + aws_key: ${AWS_ACCESS_KEY_ID} + aws_secret: ${AWS_SECRET_ACCESS_KEY} + aws_session_token: ${AWS_SESSION_TOKEN} local_file: release-files-all.tgz - remote_file: ${UPLOAD_BUCKET}/release-all/${revision}/${task_id}-${execution}-release-files-all.tar.gz - bucket: mciuploads + remote_file: release-all/${revision}/${task_id}-${execution}-release-files-all.tar.gz + bucket: ${bucket_name} permissions: public-read content_type: ${content_type|application/gzip} display_name: Release files all @@ -962,7 +981,7 @@ task_groups: - ${DRIVERS_TOOLS}/.evergreen/csfle/azurekms/delete-vm.sh - func: "upload test results" setup_group_can_fail_task: true - teardown_group_can_fail_task: true + teardown_task_can_fail_task: true setup_group_timeout_secs: 1800 tasks: - testazurekms-task @@ -989,7 +1008,7 @@ task_groups: setup_group_can_fail_task: true setup_group_timeout_secs: 1800 tasks: - - oidc-auth-test-azure-latest + - oidc-auth-test-azure - name: testgcpoidc_task_group setup_group: @@ -1013,7 +1032,7 @@ task_groups: setup_group_can_fail_task: true setup_group_timeout_secs: 1800 tasks: - - oidc-auth-test-gcp-latest + - oidc-auth-test-gcp - name: testoidc_task_group setup_group: @@ -1026,6 +1045,9 @@ task_groups: params: binary: bash include_expansions_in_env: ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_SESSION_TOKEN"] + env: + # PYTHON-4447 + MONGODB_VERSION: "8.0" args: - ${DRIVERS_TOOLS}/.evergreen/auth_oidc/setup.sh teardown_task: @@ -1037,7 +1059,7 @@ task_groups: setup_group_can_fail_task: true setup_group_timeout_secs: 1800 tasks: - - oidc-auth-test-latest + - oidc-auth-test - name: test_aws_lambda_task_group setup_group: @@ -1095,7 +1117,7 @@ tasks: genhtml --version || true valgrind --version || true - - name: "release-mac-1100" + - name: "release-mac" tags: ["release_tag"] run_on: macos-1100 commands: @@ -1121,15 +1143,6 @@ tasks: VERSION: "3.8" - func: "upload release" - - name: "release-mac-1014" - tags: ["release_tag"] - run_on: macos-1014 - commands: - - func: "build release" - vars: - VERSION: "3.7" - - func: "upload release" - - name: "release-windows" tags: ["release_tag"] run_on: windows-64-vsMulti-small @@ -1348,6 +1361,33 @@ tasks: TOPOLOGY: "sharded_cluster" - func: "run tests" + - name: "test-8.0-standalone" + tags: ["8.0", "standalone"] + commands: + - func: "bootstrap mongo-orchestration" + vars: + VERSION: "8.0" + TOPOLOGY: "server" + - func: "run tests" + + - name: "test-8.0-replica_set" + tags: ["8.0", "replica_set"] + commands: + - func: "bootstrap mongo-orchestration" + vars: + VERSION: "8.0" + TOPOLOGY: "replica_set" + - func: "run tests" + + - name: "test-8.0-sharded_cluster" + tags: ["8.0", "sharded_cluster"] + commands: + - func: "bootstrap mongo-orchestration" + vars: + VERSION: "8.0" + TOPOLOGY: "sharded_cluster" + - func: "run tests" + - name: "test-7.0-standalone" tags: ["7.0", "standalone"] commands: @@ -1925,6 +1965,24 @@ 
tasks: - func: "run aws auth test with aws web identity credentials" - func: "run aws ECS auth test" + - name: "aws-auth-test-8.0" + commands: + - func: "bootstrap mongo-orchestration" + vars: + AUTH: "auth" + ORCHESTRATION_FILE: "auth-aws.json" + TOPOLOGY: "server" + VERSION: "8.0" + - func: "assume ec2 role" + - func: "get aws auth secrets" + - func: "run aws auth test with regular aws credentials" + - func: "run aws auth test with assume role credentials" + - func: "run aws auth test with aws credentials as environment variables" + - func: "run aws auth test with aws credentials and session token as environment variables" + - func: "run aws auth test with aws EC2 credentials" + - func: "run aws auth test with aws web identity credentials" + - func: "run aws ECS auth test" + - name: "aws-auth-test-rapid" commands: - func: "bootstrap mongo-orchestration" @@ -1970,13 +2028,14 @@ tasks: - func: "run load-balancer" - func: "run tests" - - name: "oidc-auth-test-latest" + - name: "oidc-auth-test" commands: - func: "run oidc auth test with test credentials" - - name: "oidc-auth-test-azure-latest" + - name: "oidc-auth-test-azure" commands: - command: shell.exec + type: test params: shell: bash script: |- @@ -1990,9 +2049,10 @@ tasks: export AZUREOIDC_TEST_CMD="OIDC_ENV=azure ./.evergreen/run-mongodb-oidc-test.sh" bash $DRIVERS_TOOLS/.evergreen/auth_oidc/azure/run-driver-test.sh - - name: "oidc-auth-test-gcp-latest" + - name: "oidc-auth-test-gcp" commands: - command: shell.exec + type: test params: shell: bash script: |- @@ -2157,13 +2217,6 @@ axes: - id: platform display_name: OS values: - - id: macos-1014 - display_name: "macOS 10.14" - run_on: macos-1014 - variables: - skip_EC2_auth_test: true - skip_ECS_auth_test: true - skip_web_identity_auth_test: true - id: macos-1100 display_name: "macOS 11.00" run_on: macos-1100 @@ -2186,9 +2239,9 @@ axes: display_name: "RHEL 8.x" run_on: rhel87-small batchtime: 10080 # 7 days - - id: rhel80-fips - display_name: "RHEL 8.0 FIPS" - run_on: rhel80-fips + - id: rhel92-fips + display_name: "RHEL 9.2 FIPS" + run_on: rhel92-fips batchtime: 10080 # 7 days - id: ubuntu-22.04 display_name: "Ubuntu 22.04" @@ -2311,6 +2364,10 @@ axes: display_name: "MongoDB 7.0" variables: VERSION: "7.0" + - id: "8.0" + display_name: "MongoDB 8.0" + variables: + VERSION: "8.0" - id: "latest" display_name: "MongoDB latest" variables: @@ -2326,10 +2383,6 @@ axes: values: # Note: always display platform with python-version to avoid ambiguous display names. 
# Linux - - id: "3.7" - display_name: "Python 3.7" - variables: - PYTHON_BINARY: "/opt/python/3.7/bin/python3" - id: "3.8" display_name: "Python 3.8" variables: @@ -2350,10 +2403,10 @@ axes: display_name: "Python 3.12" variables: PYTHON_BINARY: "/opt/python/3.12/bin/python3" - - id: "pypy3.8" - display_name: "PyPy 3.8" + - id: "pypy3.9" + display_name: "PyPy 3.9" variables: - PYTHON_BINARY: "/opt/python/pypy3.8/bin/pypy3" + PYTHON_BINARY: "/opt/python/pypy3.9/bin/pypy3" - id: "pypy3.10" display_name: "PyPy 3.10" variables: @@ -2362,10 +2415,6 @@ axes: - id: python-version-windows display_name: "Python" values: - - id: "3.7" - display_name: "Python 3.7" - variables: - PYTHON_BINARY: "C:/python/Python37/python.exe" - id: "3.8" display_name: "Python 3.8" variables: @@ -2390,10 +2439,6 @@ axes: - id: python-version-windows-32 display_name: "Python" values: - - id: "3.7" - display_name: "32-bit Python 3.7" - variables: - PYTHON_BINARY: "C:/python/32/Python37/python.exe" - id: "3.8" display_name: "32-bit Python 3.8" variables: @@ -2570,7 +2615,7 @@ buildvariants: - matrix_name: "tests-fips" matrix_spec: platform: - - rhel80-fips + - rhel92-fips auth: "auth" ssl: "ssl" display_name: "${platform} ${auth} ${ssl}" @@ -2582,17 +2627,18 @@ buildvariants: platform: # MacOS introduced SSL support with MongoDB >= 3.2. # Older server versions (2.6, 3.0) are supported without SSL. - - macos-1014 + - macos-1100 auth: "*" ssl: "*" exclude_spec: # No point testing with SSL without auth. - - platform: macos-1014 + - platform: macos-1100 auth: "noauth" ssl: "ssl" display_name: "${platform} ${auth} ${ssl}" tasks: - ".latest" + - ".8.0" - ".7.0" - ".6.0" - ".5.0" @@ -2609,6 +2655,7 @@ buildvariants: display_name: "${platform} ${auth-ssl}" tasks: - ".latest" + - ".8.0" - ".7.0" - ".6.0" - ".5.0" @@ -2633,6 +2680,7 @@ buildvariants: add_tasks: &encryption-server-versions - ".rapid" - ".latest" + - ".8.0" - ".7.0" - ".6.0" - ".5.0" @@ -2662,6 +2710,7 @@ buildvariants: tasks: &all-server-versions - ".rapid" - ".latest" + - ".8.0" - ".7.0" - ".6.0" - ".5.0" @@ -2677,10 +2726,10 @@ buildvariants: auth: "*" ssl: "ssl" pyopenssl: "*" - # Only test "noauth" with Python 3.7. + # Only test "noauth" with Python 3.8. exclude_spec: platform: rhel8 - python-version: ["3.8", "3.9", "3.10", "pypy3.8", "pypy3.10"] + python-version: ["3.9", "3.10", "pypy3.9", "pypy3.10"] auth: "noauth" ssl: "ssl" pyopenssl: "*" @@ -2692,7 +2741,7 @@ buildvariants: - matrix_name: "tests-pyopenssl-macOS" matrix_spec: - platform: macos-1014 + platform: macos-1100 auth: "auth" ssl: "ssl" pyopenssl: "*" @@ -2741,7 +2790,7 @@ buildvariants: exclude_spec: # These interpreters are always tested without extensions. - platform: rhel8 - python-version: ["pypy3.8", "pypy3.10"] + python-version: ["pypy3.9", "pypy3.10"] c-extensions: "*" auth-ssl: "*" coverage: "*" @@ -2757,7 +2806,7 @@ buildvariants: exclude_spec: # These interpreters are always tested without extensions. - platform: rhel8 - python-version: ["pypy3.8", "pypy3.10"] + python-version: ["pypy3.9", "pypy3.10"] c-extensions: "with-c-extensions" compression: "*" display_name: "${compression} ${c-extensions} ${python-version} ${platform}" @@ -2786,7 +2835,7 @@ buildvariants: exclude_spec: # Don't test green frameworks on these Python versions. 
- platform: rhel8 - python-version: ["pypy3.8", "pypy3.10"] + python-version: ["pypy3.9", "pypy3.10"] green-framework: "*" auth-ssl: "*" display_name: "${green-framework} ${python-version} ${platform} ${auth-ssl}" @@ -2812,7 +2861,7 @@ buildvariants: matrix_spec: platform: rhel7 # Python 3.10+ requires OpenSSL 1.1.1+ - python-version: ["3.7", "3.8", "3.9", "pypy3.8", "pypy3.10"] + python-version: ["3.8", "3.9", "pypy3.9", "pypy3.10"] auth-ssl: "*" display_name: "OpenSSL 1.0.2 ${python-version} ${platform} ${auth-ssl}" tasks: @@ -2835,12 +2884,12 @@ buildvariants: then: add_tasks: *encryption-server-versions -# Storage engine tests on RHEL 8.4 (x86_64) with Python 3.7. +# Storage engine tests on RHEL 8.4 (x86_64) with Python 3.8. - matrix_name: "tests-storage-engines" matrix_spec: platform: rhel8 storage-engine: "*" - python-version: 3.7 + python-version: 3.8 display_name: "Storage ${storage-engine} ${python-version} ${platform}" rules: - if: @@ -2850,6 +2899,7 @@ buildvariants: then: add_tasks: - "test-latest-standalone" + - "test-8.0-standalone" - "test-7.0-standalone" - "test-6.0-standalone" - "test-5.0-standalone" @@ -2869,12 +2919,12 @@ buildvariants: - "test-3.6-standalone" - "test-3.6-replica_set" -# enableTestCommands=0 tests on RHEL 8.4 (x86_64) with Python 3.7. +# enableTestCommands=0 tests on RHEL 8.4 (x86_64) with Python 3.8. - matrix_name: "test-disableTestCommands" matrix_spec: platform: rhel8 disableTestCommands: "*" - python-version: "3.7" + python-version: "3.8" display_name: "Disable test commands ${python-version} ${platform}" tasks: - ".latest" @@ -2908,7 +2958,7 @@ buildvariants: - matrix_name: "tests-mod-wsgi" matrix_spec: platform: ubuntu-22.04 - python-version: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] + python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] mod-wsgi-version: "*" display_name: "${mod-wsgi-version} ${python-version} ${platform}" tasks: @@ -2920,7 +2970,7 @@ buildvariants: - matrix_name: "mockupdb-tests" matrix_spec: platform: rhel8 - python-version: 3.7 + python-version: 3.8 display_name: "MockupDB Tests" tasks: - name: "mockupdb" @@ -2968,7 +3018,7 @@ buildvariants: - matrix_name: "serverless_proxy" matrix_spec: platform: rhel8 - python-version: ["3.7", "3.10"] + python-version: ["3.8", "3.10"] auth-ssl: auth-ssl serverless: "proxy" display_name: "${serverless} ${python-version} ${platform}" @@ -2978,7 +3028,7 @@ buildvariants: - matrix_name: "data-lake-spec-tests" matrix_spec: platform: ubuntu-22.04 - python-version: ["3.7", "3.10"] + python-version: ["3.8", "3.10"] auth: "auth" c-extensions: "*" display_name: "Atlas Data Lake ${python-version} ${c-extensions}" @@ -2988,7 +3038,7 @@ buildvariants: - matrix_name: "stable-api-tests" matrix_spec: platform: rhel8 - python-version: ["3.7", "3.10"] + python-version: ["3.8", "3.10"] auth: "auth" versionedApi: "*" display_name: "Versioned API ${versionedApi} ${python-version}" @@ -3001,8 +3051,8 @@ buildvariants: - matrix_name: "ocsp-test" matrix_spec: platform: rhel8 - python-version: ["3.7", "3.10", "pypy3.8", "pypy3.10"] - mongodb-version: ["4.4", "5.0", "6.0", "7.0", "latest"] + python-version: ["3.8", "3.10", "pypy3.9", "pypy3.10"] + mongodb-version: ["4.4", "5.0", "6.0", "7.0", "8.0", "latest"] auth: "noauth" ssl: "ssl" display_name: "OCSP test ${platform} ${python-version} ${mongodb-version}" @@ -3013,8 +3063,8 @@ buildvariants: - matrix_name: "ocsp-test-windows" matrix_spec: platform: windows-64-vsMulti-small - python-version-windows: ["3.7", "3.10"] - mongodb-version: ["4.4", "5.0", "6.0", 
"7.0", "latest"] + python-version-windows: ["3.8", "3.10"] + mongodb-version: ["4.4", "5.0", "6.0", "7.0", "8.0", "latest"] auth: "noauth" ssl: "ssl" display_name: "OCSP test ${platform} ${python-version-windows} ${mongodb-version}" @@ -3025,8 +3075,8 @@ buildvariants: - matrix_name: "ocsp-test-macos" matrix_spec: - platform: macos-1014 - mongodb-version: ["4.4", "5.0", "6.0", "7.0", "latest"] + platform: macos-1100 + mongodb-version: ["4.4", "5.0", "6.0", "7.0", "8.0", "latest"] auth: "noauth" ssl: "ssl" display_name: "OCSP test ${platform} ${mongodb-version}" @@ -3067,18 +3117,20 @@ buildvariants: - name: "aws-auth-test-5.0" - name: "aws-auth-test-6.0" - name: "aws-auth-test-7.0" + - name: "aws-auth-test-8.0" - name: "aws-auth-test-rapid" - name: "aws-auth-test-latest" - matrix_name: "aws-auth-test-mac" matrix_spec: - platform: [macos-1014] + platform: [macos-1100] display_name: "MONGODB-AWS Auth ${platform} ${python-version-mac}" tasks: - name: "aws-auth-test-4.4" - name: "aws-auth-test-5.0" - name: "aws-auth-test-6.0" - name: "aws-auth-test-7.0" + - name: "aws-auth-test-8.0" - name: "aws-auth-test-rapid" - name: "aws-auth-test-latest" @@ -3092,13 +3144,14 @@ buildvariants: - name: "aws-auth-test-5.0" - name: "aws-auth-test-6.0" - name: "aws-auth-test-7.0" + - name: "aws-auth-test-8.0" - name: "aws-auth-test-rapid" - name: "aws-auth-test-latest" - matrix_name: "load-balancer" matrix_spec: platform: rhel8 - mongodb-version: ["6.0", "7.0", "rapid", "latest"] + mongodb-version: ["6.0", "7.0", "8.0", "rapid", "latest"] auth-ssl: "*" python-version: "*" loadbalancer: "*" diff --git a/.evergreen/run-import-time-test.sh b/.evergreen/run-import-time-test.sh index f013eb115c..2b17f5ffeb 100755 --- a/.evergreen/run-import-time-test.sh +++ b/.evergreen/run-import-time-test.sh @@ -25,7 +25,9 @@ function get_import_time() { } get_import_time $HEAD_SHA +git stash git checkout $BASE_SHA get_import_time $BASE_SHA git checkout $HEAD_SHA +git stash apply python tools/compare_import_time.py $HEAD_SHA $BASE_SHA diff --git a/.evergreen/run-mod-wsgi-tests.sh b/.evergreen/run-mod-wsgi-tests.sh index afb3f271ae..e1f5238110 100644 --- a/.evergreen/run-mod-wsgi-tests.sh +++ b/.evergreen/run-mod-wsgi-tests.sh @@ -19,7 +19,10 @@ fi PYTHON_VERSION=$(${PYTHON_BINARY} -c "import sys; sys.stdout.write('.'.join(str(val) for val in sys.version_info[:2]))") # Ensure the C extensions are installed. -${PYTHON_BINARY} setup.py build_ext -i +${PYTHON_BINARY} -m venv --system-site-packages .venv +source .venv/bin/activate +pip install -U pip +python -m pip install -e . 
export MOD_WSGI_SO=/opt/python/mod_wsgi/python_version/$PYTHON_VERSION/mod_wsgi_version/$MOD_WSGI_VERSION/mod_wsgi.so export PYTHONHOME=/opt/python/$PYTHON_VERSION @@ -38,10 +41,12 @@ trap '$APACHE -k stop -f ${PROJECT_DIRECTORY}/test/mod_wsgi_test/${APACHE_CONFIG wget -t 1 -T 10 -O - "http://localhost:8080/interpreter1${PROJECT_DIRECTORY}" || (cat error_log && exit 1) wget -t 1 -T 10 -O - "http://localhost:8080/interpreter2${PROJECT_DIRECTORY}" || (cat error_log && exit 1) -${PYTHON_BINARY} ${PROJECT_DIRECTORY}/test/mod_wsgi_test/test_client.py -n 25000 -t 100 parallel \ +python ${PROJECT_DIRECTORY}/test/mod_wsgi_test/test_client.py -n 25000 -t 100 parallel \ http://localhost:8080/interpreter1${PROJECT_DIRECTORY} http://localhost:8080/interpreter2${PROJECT_DIRECTORY} || \ (tail -n 100 error_log && exit 1) -${PYTHON_BINARY} ${PROJECT_DIRECTORY}/test/mod_wsgi_test/test_client.py -n 25000 serial \ +python ${PROJECT_DIRECTORY}/test/mod_wsgi_test/test_client.py -n 25000 serial \ http://localhost:8080/interpreter1${PROJECT_DIRECTORY} http://localhost:8080/interpreter2${PROJECT_DIRECTORY} || \ (tail -n 100 error_log && exit 1) + +rm -rf .venv diff --git a/.evergreen/run-tests.sh b/.evergreen/run-tests.sh index b11acacd84..d47e3a9505 100755 --- a/.evergreen/run-tests.sh +++ b/.evergreen/run-tests.sh @@ -31,9 +31,6 @@ set -o xtrace AUTH=${AUTH:-noauth} SSL=${SSL:-nossl} TEST_ARGS="${*:1}" -PYTHON=$(which python) -# TODO: Remove when we drop PyPy 3.8 support. -OLD_PYPY=$(python -c "import sys; print(sys.implementation.name.lower() == 'pypy' and sys.implementation.version < (7, 3, 12))") export PIP_QUIET=1 # Quiet by default export PIP_PREFER_BINARY=1 # Prefer binary dists by default @@ -113,10 +110,6 @@ fi if [ "$COMPRESSORS" = "snappy" ]; then python -m pip install '.[snappy]' - if [ "$OLD_PYPY" == "True" ]; then - pip install "python-snappy<0.7.0" - fi - PYTHON=python elif [ "$COMPRESSORS" = "zstd" ]; then python -m pip install zstandard fi @@ -158,6 +151,7 @@ if [ -n "$TEST_ENCRYPTION" ] || [ -n "$TEST_FLE_AZURE_AUTO" ] || [ -n "$TEST_FLE if [ ! -d "libmongocrypt_git" ]; then git clone https://github.com/mongodb/libmongocrypt.git libmongocrypt_git fi + python -m pip install -U setuptools python -m pip install ./libmongocrypt_git/bindings/python python -c "import pymongocrypt; print('pymongocrypt version: '+pymongocrypt.__version__)" python -c "import pymongocrypt; print('libmongocrypt version: '+pymongocrypt.libmongocrypt_version())" @@ -236,7 +230,7 @@ if [ -n "$PERF_TEST" ]; then TEST_ARGS="test/performance/perf_test.py" fi -echo "Running $AUTH tests over $SSL with python $PYTHON" +echo "Running $AUTH tests over $SSL with python $(which python)" python -c 'import sys; print(sys.version)' @@ -245,11 +239,11 @@ python -c 'import sys; print(sys.version)' # Run the tests with coverage if requested and coverage is installed. # Only cover CPython. PyPy reports suspiciously low coverage. -PYTHON_IMPL=$($PYTHON -c "import platform; print(platform.python_implementation())") +PYTHON_IMPL=$(python -c "import platform; print(platform.python_implementation())") if [ -n "$COVERAGE" ] && [ "$PYTHON_IMPL" = "CPython" ]; then - # coverage 7.3 dropped support for Python 3.7, keep in sync with combine-coverage.sh. + # Keep in sync with combine-coverage.sh. # coverage >=5 is needed for relative_files=true. 
- python -m pip install pytest-cov "coverage>=5,<7.3" + python -m pip install pytest-cov "coverage>=5,<=7.5" TEST_ARGS="$TEST_ARGS --cov" fi diff --git a/.evergreen/utils.sh b/.evergreen/utils.sh index 35005c0d6a..f0a5851d91 100755 --- a/.evergreen/utils.sh +++ b/.evergreen/utils.sh @@ -4,8 +4,8 @@ set -o xtrace find_python3() { PYTHON="" - # Add a fallback system python3 if it is available and Python 3.7+. - if is_python_37 "$(command -v python3)"; then + # Add a fallback system python3 if it is available and Python 3.8+. + if is_python_38 "$(command -v python3)"; then PYTHON="$(command -v python3)" fi # Find a suitable toolchain version, if available. @@ -14,23 +14,23 @@ find_python3() { if [ -d "/Library/Frameworks/Python.Framework/Versions/3.10" ]; then PYTHON="/Library/Frameworks/Python.Framework/Versions/3.10/bin/python3" # macos 10.14 - elif [ -d "/Library/Frameworks/Python.Framework/Versions/3.7" ]; then - PYTHON="/Library/Frameworks/Python.Framework/Versions/3.7/bin/python3" + elif [ -d "/Library/Frameworks/Python.Framework/Versions/3.8" ]; then + PYTHON="/Library/Frameworks/Python.Framework/Versions/3.8/bin/python3" fi elif [ "Windows_NT" = "$OS" ]; then # Magic variable in cygwin - PYTHON="C:/python/Python37/python.exe" + PYTHON="C:/python/Python38/python.exe" else - # Prefer our own toolchain, fall back to mongodb toolchain if it has Python 3.7+. - if [ -f "/opt/python/3.7/bin/python3" ]; then - PYTHON="/opt/python/3.7/bin/python3" - elif is_python_37 "$(command -v /opt/mongodbtoolchain/v4/bin/python3)"; then + # Prefer our own toolchain, fall back to mongodb toolchain if it has Python 3.8+. + if [ -f "/opt/python/3.8/bin/python3" ]; then + PYTHON="/opt/python/3.8/bin/python3" + elif is_python_38 "$(command -v /opt/mongodbtoolchain/v4/bin/python3)"; then PYTHON="/opt/mongodbtoolchain/v4/bin/python3" - elif is_python_37 "$(command -v /opt/mongodbtoolchain/v3/bin/python3)"; then + elif is_python_38 "$(command -v /opt/mongodbtoolchain/v3/bin/python3)"; then PYTHON="/opt/mongodbtoolchain/v3/bin/python3" fi fi if [ -z "$PYTHON" ]; then - echo "Cannot test without python3.7+ installed!" + echo "Cannot test without python3.8+ installed!" 
exit 1 fi echo "$PYTHON" @@ -66,7 +66,7 @@ createvirtualenv () { export PIP_QUIET=1 python -m pip install --upgrade pip - python -m pip install --upgrade setuptools tox + python -m pip install --upgrade tox } # Usage: @@ -96,15 +96,15 @@ testinstall () { fi } -# Function that returns success if the provided Python binary is version 3.7 or later +# Function that returns success if the provided Python binary is version 3.8 or later # Usage: -# is_python_37 /path/to/python +# is_python_38 /path/to/python # * param1: Python binary -is_python_37() { +is_python_38() { if [ -z "$1" ]; then return 1 - elif $1 -c "import sys; exit(sys.version_info[:2] < (3, 7))"; then - # runs when sys.version_info[:2] >= (3, 7) + elif $1 -c "import sys; exit(sys.version_info[:2] < (3, 8))"; then + # runs when sys.version_info[:2] >= (3, 8) return 0 else return 1 diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml new file mode 100644 index 0000000000..370b8759e6 --- /dev/null +++ b/.github/workflows/codeql.yml @@ -0,0 +1,66 @@ +name: "CodeQL" + +on: + push: + branches: [ "master", "v*"] + tags: ['*'] + pull_request: + workflow_call: + inputs: + ref: + required: true + type: string + schedule: + - cron: '17 10 * * 2' + +concurrency: + group: codeql-${{ github.ref }} + cancel-in-progress: true + +jobs: + analyze: + name: Analyze (${{ matrix.language }}) + runs-on: "ubuntu-latest" + timeout-minutes: 360 + permissions: + # required for all workflows + security-events: write + + strategy: + fail-fast: false + matrix: + include: + - language: c-cpp + build-mode: manual + - language: python + build-mode: none + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + ref: ${{ inputs.ref }} + - uses: actions/setup-python@v3 + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v3 + with: + languages: ${{ matrix.language }} + build-mode: ${{ matrix.build-mode }} + # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs + queries: security-extended + config: | + paths-ignore: + - '.github/**' + - 'doc/**' + - 'tools/**' + - 'test/**' + + - if: matrix.build-mode == 'manual' + run: | + pip install -e . 
+ + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v3 + with: + category: "/language:${{matrix.language}}" diff --git a/.github/workflows/dist.yml b/.github/workflows/dist.yml new file mode 100644 index 0000000000..7ec55dd3b3 --- /dev/null +++ b/.github/workflows/dist.yml @@ -0,0 +1,146 @@ +name: Python Dist + +on: + push: + tags: + - "[0-9]+.[0-9]+.[0-9]+" + - "[0-9]+.[0-9]+.[0-9]+.post[0-9]+" + - "[0-9]+.[0-9]+.[0-9]+[a-b][0-9]+" + - "[0-9]+.[0-9]+.[0-9]+rc[0-9]+" + workflow_dispatch: + pull_request: + workflow_call: + inputs: + ref: + required: true + type: string + +concurrency: + group: dist-${{ github.ref }} + cancel-in-progress: true + +defaults: + run: + shell: bash -eux {0} + +jobs: + build_wheels: + name: Build wheels for ${{ matrix.buildplat[1] }} + runs-on: ${{ matrix.buildplat[0] }} + strategy: + # Ensure that a wheel builder finishes even if another fails + fail-fast: false + matrix: + # Github Actions doesn't support pairing matrix values together, let's improvise + # https://github.com/github/feedback/discussions/7835#discussioncomment-1769026 + buildplat: + - [ubuntu-20.04, "manylinux_x86_64", "cp3*-manylinux_x86_64"] + - [ubuntu-20.04, "manylinux_aarch64", "cp3*-manylinux_aarch64"] + - [ubuntu-20.04, "manylinux_ppc64le", "cp3*-manylinux_ppc64le"] + - [ubuntu-20.04, "manylinux_s390x", "cp3*-manylinux_s390x"] + - [ubuntu-20.04, "manylinux_i686", "cp3*-manylinux_i686"] + - [windows-2019, "win_amd6", "cp3*-win_amd64"] + - [windows-2019, "win32", "cp3*-win32"] + - [macos-14, "macos", "cp*-macosx_*"] + + steps: + - name: Checkout pymongo + uses: actions/checkout@v4 + with: + fetch-depth: 0 + ref: ${{ inputs.ref }} + + - uses: actions/setup-python@v5 + with: + cache: 'pip' + python-version: 3.8 + cache-dependency-path: 'pyproject.toml' + allow-prereleases: true + + - name: Set up QEMU + if: runner.os == 'Linux' + uses: docker/setup-qemu-action@v3 + with: + platforms: all + + - name: Install cibuildwheel + # Note: the default manylinux is manylinux2014 + run: | + python -m pip install -U pip + python -m pip install "cibuildwheel>=2.17,<3" + + - name: Build wheels + env: + CIBW_BUILD: ${{ matrix.buildplat[2] }} + run: python -m cibuildwheel --output-dir wheelhouse + + - name: Build manylinux1 wheels + if: ${{ matrix.buildplat[1] == 'manylinux_x86_64' || matrix.buildplat[1] == 'manylinux_i686' }} + env: + CIBW_MANYLINUX_X86_64_IMAGE: manylinux1 + CIBW_MANYLINUX_I686_IMAGE: manylinux1 + CIBW_BUILD: "cp38-${{ matrix.buildplat[1] }} cp39-${{ matrix.buildplat[1] }}" + run: python -m cibuildwheel --output-dir wheelhouse + + - name: Assert all versions in wheelhouse + if: ${{ ! startsWith(matrix.buildplat[1], 'macos') }} + run: | + ls wheelhouse/*cp38*.whl + ls wheelhouse/*cp39*.whl + ls wheelhouse/*cp310*.whl + ls wheelhouse/*cp311*.whl + ls wheelhouse/*cp312*.whl + + - uses: actions/upload-artifact@v4 + with: + name: wheel-${{ matrix.buildplat[1] }} + path: ./wheelhouse/*.whl + if-no-files-found: error + + make_sdist: + name: Make SDist + runs-on: macos-13 + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + ref: ${{ inputs.ref }} + + - uses: actions/setup-python@v5 + with: + # Build sdist on lowest supported Python + python-version: '3.8' + + - name: Build SDist + run: | + set -ex + python -m pip install -U pip build + python -m build --sdist . + + - name: Test SDist + run: | + python -m pip install dist/*.gz + cd .. 
+ python -c "from pymongo import has_c; assert has_c()" + + - uses: actions/upload-artifact@v4 + with: + name: "sdist" + path: ./dist/*.tar.gz + + collect_dist: + runs-on: ubuntu-latest + needs: [build_wheels, make_sdist] + name: Download Wheels + steps: + - name: Download all workflow run artifacts + uses: actions/download-artifact@v4 + - name: Flatten directory + working-directory: . + run: | + find . -mindepth 2 -type f -exec mv {} . \; + find . -type d -empty -delete + - uses: actions/upload-artifact@v4 + with: + name: all-dist-${{ github.run_id }} + path: "./*" diff --git a/.github/workflows/release-python.yml b/.github/workflows/release-python.yml index c17d4046e9..2ce2c50033 100644 --- a/.github/workflows/release-python.yml +++ b/.github/workflows/release-python.yml @@ -1,157 +1,95 @@ -name: Python Wheels +name: Release on: - push: - tags: - - "[0-9]+.[0-9]+.[0-9]+" - - "[0-9]+.[0-9]+.[0-9]+.post[0-9]+" - - "[0-9]+.[0-9]+.[0-9]+[a-b][0-9]+" - - "[0-9]+.[0-9]+.[0-9]+rc[0-9]+" workflow_dispatch: - pull_request: - -concurrency: - group: wheels-${{ github.ref }} - cancel-in-progress: true + inputs: + version: + description: "The new version to set" + required: true + following_version: + description: "The post (dev) version to set" + required: true + dry_run: + description: "Dry Run?" + default: false + type: boolean + +env: + # Changes per repo + PRODUCT_NAME: PyMongo + # Changes per branch + SILK_ASSET_GROUP: mongodb-python-driver + EVERGREEN_PROJECT: mongo-python-driver-v4.8 defaults: run: shell: bash -eux {0} jobs: - build_wheels: - name: Build wheels for ${{ matrix.buildplat[1] }} - runs-on: ${{ matrix.buildplat[0] }} - strategy: - # Ensure that a wheel builder finishes even if another fails - fail-fast: false - matrix: - # Github Actions doesn't support pairing matrix values together, let's improvise - # https://github.com/github/feedback/discussions/7835#discussioncomment-1769026 - buildplat: - - [ubuntu-20.04, "manylinux_x86_64", "cp3*-manylinux_x86_64"] - - [ubuntu-20.04, "manylinux_aarch64", "cp3*-manylinux_aarch64"] - - [ubuntu-20.04, "manylinux_ppc64le", "cp3*-manylinux_ppc64le"] - - [ubuntu-20.04, "manylinux_s390x", "cp3*-manylinux_s390x"] - - [ubuntu-20.04, "manylinux_i686", "cp3*-manylinux_i686"] - - [windows-2019, "win_amd6", "cp3*-win_amd64"] - - [windows-2019, "win32", "cp3*-win32"] - - [macos-14, "macos", "cp*-macosx_*"] - + pre-publish: + environment: release + runs-on: ubuntu-latest + permissions: + id-token: write + contents: write + outputs: + version: ${{ steps.pre-publish.outputs.version }} steps: - - name: Checkout pymongo - uses: actions/checkout@v4 + - uses: mongodb-labs/drivers-github-tools/secure-checkout@v2 with: - fetch-depth: 0 - - - uses: actions/setup-python@v5 + app_id: ${{ vars.APP_ID }} + private_key: ${{ secrets.APP_PRIVATE_KEY }} + - uses: mongodb-labs/drivers-github-tools/setup@v2 with: - cache: 'pip' - python-version: 3.8 - cache-dependency-path: 'pyproject.toml' - allow-prereleases: true - - - name: Set up QEMU - if: runner.os == 'Linux' - uses: docker/setup-qemu-action@v3 + aws_role_arn: ${{ secrets.AWS_ROLE_ARN }} + aws_region_name: ${{ vars.AWS_REGION_NAME }} + aws_secret_id: ${{ secrets.AWS_SECRET_ID }} + artifactory_username: ${{ vars.ARTIFACTORY_USERNAME }} + - uses: mongodb-labs/drivers-github-tools/python/pre-publish@v2 + id: pre-publish with: - platforms: all + version: ${{ inputs.version }} + dry_run: ${{ inputs.dry_run }} - - name: Install cibuildwheel - # Note: the default manylinux is manylinux2014 - run: | - python -m pip install 
-U pip - python -m pip install "cibuildwheel>=2.17,<3" + build-dist: + needs: [pre-publish] + uses: ./.github/workflows/dist.yml + with: + ref: ${{ needs.pre-publish.outputs.version }} - - name: Build wheels - env: - CIBW_BUILD: ${{ matrix.buildplat[2] }} - run: python -m cibuildwheel --output-dir wheelhouse - - - name: Build manylinux1 wheels - if: ${{ matrix.buildplat[1] == 'manylinux_x86_64' || matrix.buildplat[1] == 'manylinux_i686' }} - env: - CIBW_MANYLINUX_X86_64_IMAGE: manylinux1 - CIBW_MANYLINUX_I686_IMAGE: manylinux1 - CIBW_BUILD: "cp37-${{ matrix.buildplat[1] }} cp38-${{ matrix.buildplat[1] }} cp39-${{ matrix.buildplat[1] }}" - run: python -m cibuildwheel --output-dir wheelhouse - - - name: Assert all versions in wheelhouse - if: ${{ ! startsWith(matrix.buildplat[1], 'macos') }} - run: | - ls wheelhouse/*cp37*.whl - ls wheelhouse/*cp38*.whl - ls wheelhouse/*cp39*.whl - ls wheelhouse/*cp310*.whl - ls wheelhouse/*cp311*.whl - ls wheelhouse/*cp312*.whl - - - uses: actions/upload-artifact@v4 - with: - name: wheel-${{ matrix.buildplat[1] }} - path: ./wheelhouse/*.whl - if-no-files-found: error - - make_sdist: - name: Make SDist - runs-on: macos-13 - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - - uses: actions/setup-python@v5 - with: - # Build sdist on lowest supported Python - python-version: '3.7' - - - name: Build SDist - run: | - set -ex - python -m pip install -U pip build - python -m build --sdist . - - - name: Test SDist - run: | - python -m pip install dist/*.gz - cd .. - python -c "from pymongo import has_c; assert has_c()" - - - uses: actions/upload-artifact@v4 - with: - name: "sdist" - path: ./dist/*.tar.gz - - collect_dist: - runs-on: ubuntu-latest - needs: [build_wheels, make_sdist] - name: Download Wheels - steps: - - name: Download all workflow run artifacts - uses: actions/download-artifact@v4 - - name: Flatten directory - working-directory: . - run: | - find . -mindepth 2 -type f -exec mv {} . \; - find . 
-type d -empty -delete - - uses: actions/upload-artifact@v4 - with: - name: all-dist-${{ github.job }} - path: "./*" + static-scan: + needs: [pre-publish] + permissions: + security-events: write + uses: ./.github/workflows/codeql.yml + with: + ref: ${{ needs.pre-publish.outputs.version }} publish: - # https://packaging.python.org/en/latest/guides/publishing-package-distribution-releases-using-github-actions-ci-cd-workflows/#publishing-the-distribution-to-pypi - needs: [collect_dist] - if: startsWith(github.ref, 'refs/tags/') + needs: [build-dist, static-scan] runs-on: ubuntu-latest environment: release permissions: id-token: write + contents: write + security-events: write steps: - - name: Download all the dists - uses: actions/download-artifact@v4 - with: - name: all-dist-${{ github.job }} - path: dist/ - - name: Publish distribution 📦 to PyPI - uses: pypa/gh-action-pypi-publish@release/v1 + - uses: mongodb-labs/drivers-github-tools/secure-checkout@v2 + with: + app_id: ${{ vars.APP_ID }} + private_key: ${{ secrets.APP_PRIVATE_KEY }} + - uses: mongodb-labs/drivers-github-tools/setup@v2 + with: + aws_role_arn: ${{ secrets.AWS_ROLE_ARN }} + aws_region_name: ${{ vars.AWS_REGION_NAME }} + aws_secret_id: ${{ secrets.AWS_SECRET_ID }} + artifactory_username: ${{ vars.ARTIFACTORY_USERNAME }} + - uses: mongodb-labs/drivers-github-tools/python/publish@v2 + with: + version: ${{ inputs.version }} + following_version: ${{ inputs.following_version }} + product_name: ${{ env.PRODUCT_NAME }} + silk_asset_group: ${{ env.SILK_ASSET_GROUP }} + evergreen_project: ${{ env.EVERGREEN_PROJECT }} + token: ${{ github.token }} + dry_run: ${{ inputs.dry_run }} diff --git a/.github/workflows/test-python.yml b/.github/workflows/test-python.yml index 31dc018f27..b93c93c022 100644 --- a/.github/workflows/test-python.yml +++ b/.github/workflows/test-python.yml @@ -31,12 +31,10 @@ jobs: - name: Run linters run: | tox -m lint-manual - - name: Check Manifest - run: | - tox -m manifest - name: Run compilation run: | - pip install -e . + export PYMONGO_C_EXT_MUST_BUILD=1 + pip install -v -e . python tools/fail_if_no_c.py - name: Run typecheck run: | @@ -53,7 +51,7 @@ jobs: strategy: matrix: os: [ubuntu-20.04] - python-version: ["3.7", "3.11", "pypy-3.8"] + python-version: ["3.8", "3.11", "pypy-3.9"] name: CPython ${{ matrix.python-version }}-${{ matrix.os }} steps: - uses: actions/checkout@v4 @@ -137,7 +135,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ["3.7", "3.11"] + python: ["3.8", "3.11"] steps: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 @@ -162,7 +160,7 @@ jobs: cache: 'pip' cache-dependency-path: 'pyproject.toml' # Build sdist on lowest supported Python - python-version: '3.7' + python-version: '3.8' - name: Build SDist shell: bash run: | @@ -194,7 +192,7 @@ jobs: cache: 'pip' cache-dependency-path: 'sdist/test/pyproject.toml' # Test sdist on lowest supported Python - python-version: '3.7' + python-version: '3.8' - name: Start MongoDB uses: supercharge/mongodb-github-action@1.10.0 - name: Run Test diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index e5d83be634..bcfc553748 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -16,7 +16,7 @@ be of interest or that has already been addressed. ## Supported Interpreters -PyMongo supports CPython 3.7+ and PyPy3.8+. Language features not +PyMongo supports CPython 3.8+ and PyPy3.9+. Language features not supported by all interpreters can not be used. 
## Style Guide diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index bca389a746..0000000000 --- a/MANIFEST.in +++ /dev/null @@ -1,32 +0,0 @@ -include README.md -include LICENSE -include THIRD-PARTY-NOTICES -include *.ini -include requirements.txt -exclude .coveragerc -exclude .git-blame-ignore-revs -exclude .pre-commit-config.yaml -exclude .readthedocs.yaml -exclude CONTRIBUTING.md -exclude RELEASE.md -recursive-include doc *.rst -recursive-include doc *.py -recursive-include doc *.conf -recursive-include doc *.css -recursive-include doc *.js -recursive-include doc *.png -include doc/Makefile -include doc/_templates/layout.html -include doc/make.bat -include doc/static/periodic-executor-refs.dot -recursive-include requirements *.txt -recursive-include tools *.py -include tools/README.rst -include green_framework_test.py -recursive-include test *.pem -recursive-include test *.py -recursive-include test *.json -recursive-include bson *.h -prune test/mod_wsgi_test -prune test/lambda -prune .evergreen diff --git a/README.md b/README.md index 046b899a7b..3d13f1aa9a 100644 --- a/README.md +++ b/README.md @@ -78,12 +78,6 @@ PyMongo can be installed with [pip](http://pypi.python.org/pypi/pip): python -m pip install pymongo ``` -Or `easy_install` from [setuptools](http://pypi.python.org/pypi/setuptools): - -```bash -python -m easy_install pymongo -``` - You can also download the project source and do: ```bash @@ -96,7 +90,7 @@ package that is incompatible with PyMongo. ## Dependencies -PyMongo supports CPython 3.7+ and PyPy3.7+. +PyMongo supports CPython 3.8+ and PyPy3.9+. Required dependencies: diff --git a/_setup.py b/_setup.py new file mode 100644 index 0000000000..65ae1908fe --- /dev/null +++ b/_setup.py @@ -0,0 +1,143 @@ +from __future__ import annotations + +import os +import sys +import warnings + +# Hack to silence atexit traceback in some Python versions +try: + import multiprocessing # noqa: F401 +except ImportError: + pass + +from setuptools import setup +from setuptools.command.build_ext import build_ext +from setuptools.extension import Extension + + +class custom_build_ext(build_ext): + """Allow C extension building to fail. + + The C extension speeds up BSON encoding, but is not essential. + """ + + warning_message = """ +******************************************************************** +WARNING: %s could not +be compiled. No C extensions are essential for PyMongo to run, +although they do result in significant speed improvements. +%s + +Please see the installation docs for solutions to build issues: + +https://pymongo.readthedocs.io/en/stable/installation.html + +Here are some hints for popular operating systems: + +If you are seeing this message on Linux you probably need to +install GCC and/or the Python development package for your +version of Python. + +Debian and Ubuntu users should issue the following command: + + $ sudo apt-get install build-essential python-dev + +Users of Red Hat based distributions (RHEL, CentOS, Amazon Linux, +Oracle Linux, Fedora, etc.) should issue the following command: + + $ sudo yum install gcc python-devel + +If you are seeing this message on Microsoft Windows please install +PyMongo using pip. Modern versions of pip will install PyMongo +from binary wheels available on pypi. If you must install from +source read the documentation here: + +https://pymongo.readthedocs.io/en/stable/installation.html#installing-from-source-on-windows + +If you are seeing this message on macOS / OSX please install PyMongo +using pip. 
Modern versions of pip will install PyMongo from binary +wheels available on pypi. If wheels are not available for your version +of macOS / OSX, or you must install from source read the documentation +here: + +https://pymongo.readthedocs.io/en/stable/installation.html#osx +******************************************************************** +""" + + def run(self): + try: + build_ext.run(self) + except Exception: + if os.environ.get("PYMONGO_C_EXT_MUST_BUILD"): + raise + e = sys.exc_info()[1] + sys.stdout.write("%s\n" % str(e)) + warnings.warn( + self.warning_message + % ( + "Extension modules", + "There was an issue with your platform configuration - see above.", + ), + stacklevel=2, + ) + + def build_extension(self, ext): + name = ext.name + try: + build_ext.build_extension(self, ext) + except Exception: + if os.environ.get("PYMONGO_C_EXT_MUST_BUILD"): + raise + e = sys.exc_info()[1] + sys.stdout.write("%s\n" % str(e)) + warnings.warn( + self.warning_message + % ( + "The %s extension module" % (name,), # noqa: UP031 + "The output above this warning shows how the compilation failed.", + ), + stacklevel=2, + ) + + +ext_modules = [ + Extension( + "bson._cbson", + include_dirs=["bson"], + sources=["bson/_cbsonmodule.c", "bson/time64.c", "bson/buffer.c"], + ), + Extension( + "pymongo._cmessage", + include_dirs=["bson"], + sources=[ + "pymongo/_cmessagemodule.c", + "bson/_cbsonmodule.c", + "bson/time64.c", + "bson/buffer.c", + ], + ), +] + + +if "--no_ext" in sys.argv or os.environ.get("NO_EXT"): + try: + sys.argv.remove("--no_ext") + except ValueError: + pass + ext_modules = [] +elif sys.platform.startswith("java") or sys.platform == "cli" or "PyPy" in sys.version: + sys.stdout.write( + """ +*****************************************************\n +The optional C extensions are currently not supported\n +by this python implementation.\n +*****************************************************\n +""" + ) + ext_modules = [] + +setup( + cmdclass={"build_ext": custom_build_ext}, + ext_modules=ext_modules, + packages=["bson", "pymongo", "gridfs"], +) # type:ignore diff --git a/bson/binary.py b/bson/binary.py index be33464462..5fe1bacd16 100644 --- a/bson/binary.py +++ b/bson/binary.py @@ -364,4 +364,7 @@ def __ne__(self, other: Any) -> bool: return not self == other def __repr__(self) -> str: - return f"Binary({bytes.__repr__(self)}, {self.__subtype})" + if self.__subtype == SENSITIVE_SUBTYPE: + return f"<Binary(REDACTED, {self.__subtype})>" + else: + return f"Binary({bytes.__repr__(self)}, {self.__subtype})" diff --git a/doc/changelog.rst b/doc/changelog.rst index ff03742378..1935fda233 100644 --- a/doc/changelog.rst +++ b/doc/changelog.rst @@ -1,6 +1,71 @@ Changelog ========= +Changes in Version 4.8.0 +------------------------- + +.. warning:: PyMongo 4.8 drops support for Python 3.7 and PyPy 3.8: Python 3.8+ or PyPy 3.9+ is now required. + +PyMongo 4.8 brings a number of improvements including: + +- The handshake metadata for "os.name" on Windows has been simplified to "Windows" to improve import time. +- The repr of ``bson.binary.Binary`` is now redacted when the subtype is SENSITIVE_SUBTYPE(8). +- Secure Software Development Life Cycle automation for release process. + GitHub Releases now include a Software Bill of Materials, and signature + files corresponding to the distribution files released on PyPI. +- Fixed a bug in change streams where both ``startAtOperationTime`` and ``resumeToken`` + could be added to a retry attempt, which caused the retry to fail.
+- Fallback to stdlib ``ssl`` module when ``pyopenssl`` import fails with AttributeError. +- Improved performance of MongoClient operations, especially when many operations are being run concurrently. + +Unavoidable breaking changes +............................ + +- Since we are now using ``hatch`` as our build backend, we no longer have a usable ``setup.py`` file + and require installation using ``pip``. Attempts to invoke the ``setup.py`` file will raise an exception. + Additionally, ``pip`` >= 21.3 is now required for editable installs. + +Issues Resolved +............... + +See the `PyMongo 4.8 release notes in JIRA`_ for the list of resolved issues +in this release. + +.. _PyMongo 4.8 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=37057 + +Changes in Version 4.7.3 +------------------------- + +Version 4.7.3 has further fixes for lazily loading modules. + +- Use deferred imports instead of importlib lazy module loading. +- Improve import time on Windows. +- Reduce verbosity of "Waiting for suitable server to become available" log message from info to debug. + +Issues Resolved +............... + +See the `PyMongo 4.7.3 release notes in JIRA`_ for the list of resolved issues +in this release. + +.. _PyMongo 4.7.3 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=39865 + +Changes in Version 4.7.2 +------------------------- + +Version 4.7.2 fixes a bug introduced in 4.7.0: + +- Fixed a bug where PyMongo could not be used with the Nuitka compiler. + +Issues Resolved +............... + +See the `PyMongo 4.7.2 release notes in JIRA`_ for the list of resolved issues +in this release. + +.. _PyMongo 4.7.2 release notes in JIRA: https://jira.mongodb.org/secure/ReleaseNote.jspa?projectId=10004&version=39710 + + Changes in Version 4.7.1 ------------------------- diff --git a/doc/contributors.rst b/doc/contributors.rst index d7f1030322..49fb2d844d 100644 --- a/doc/contributors.rst +++ b/doc/contributors.rst @@ -100,3 +100,4 @@ The following is a list of people who have contributed to - Stephan Hof (stephan-hof) - Casey Clements (caseyclements) - Ivan Lukyanchikov (ilukyanchikov) +- Terry Patterson diff --git a/doc/faq.rst b/doc/faq.rst index 2d211c756c..f0463badaa 100644 --- a/doc/faq.rst +++ b/doc/faq.rst @@ -166,7 +166,7 @@ they are returned to the pool. Does PyMongo support Python 3? ------------------------------ -PyMongo supports CPython 3.7+ and PyPy3.8+. See the :doc:`python3` for details. +PyMongo supports CPython 3.8+ and PyPy3.9+. See the :doc:`python3` for details. Does PyMongo support asynchronous frameworks like Gevent, asyncio, Tornado, or Twisted? --------------------------------------------------------------------------------------- diff --git a/doc/installation.rst b/doc/installation.rst index edbdc0ac63..ee83b30c6f 100644 --- a/doc/installation.rst +++ b/doc/installation.rst @@ -28,7 +28,7 @@ To upgrade using pip:: Dependencies ------------ -PyMongo supports CPython 3.7+ and PyPy3.7+. +PyMongo supports CPython 3.8+ and PyPy3.9+. Required dependencies ..................... @@ -140,7 +140,7 @@ See `http://bugs.python.org/issue11623 `_ for a more detailed explanation. **Lion (10.7) and newer** - PyMongo's C extensions can be built against -versions of Python 3.7+ downloaded from python.org. In all cases Xcode must be +versions of Python 3.8+ downloaded from python.org. In all cases Xcode must be installed with 'UNIX Development Support'. 
**Xcode 5.1**: Starting with version 5.1 the version of clang that ships with diff --git a/doc/python3.rst b/doc/python3.rst index cc11409bcf..148c5ee454 100644 --- a/doc/python3.rst +++ b/doc/python3.rst @@ -4,7 +4,7 @@ Python 3 FAQ What Python 3 versions are supported? ------------------------------------- -PyMongo supports CPython 3.7+ and PyPy3.8+. +PyMongo supports CPython 3.8+ and PyPy3.9+. Are there any PyMongo behavior changes with Python 3? ----------------------------------------------------- diff --git a/hatch_build.py b/hatch_build.py new file mode 100644 index 0000000000..91315eb09f --- /dev/null +++ b/hatch_build.py @@ -0,0 +1,36 @@ +"""A custom hatch build hook for pymongo.""" +from __future__ import annotations + +import os +import subprocess +import sys +from pathlib import Path + +from hatchling.builders.hooks.plugin.interface import BuildHookInterface + + +class CustomHook(BuildHookInterface): + """The pymongo build hook.""" + + def initialize(self, version, build_data): + """Initialize the hook.""" + if self.target_name == "sdist": + return + here = Path(__file__).parent.resolve() + sys.path.insert(0, str(here)) + + subprocess.check_call([sys.executable, "_setup.py", "build_ext", "-i"]) + + # Ensure wheel is marked as binary and contains the binary files. + build_data["infer_tag"] = True + build_data["pure_python"] = False + if os.name == "nt": + patt = ".pyd" + else: + patt = ".so" + for pkg in ["bson", "pymongo"]: + dpath = here / pkg + for fpath in dpath.glob(f"*{patt}"): + relpath = os.path.relpath(fpath, here) + build_data["artifacts"].append(relpath) + build_data["force_include"][relpath] = relpath diff --git a/pymongo/_gcp_helpers.py b/pymongo/_gcp_helpers.py index 46f02ba1e5..d90f3cc217 100644 --- a/pymongo/_gcp_helpers.py +++ b/pymongo/_gcp_helpers.py @@ -16,10 +16,11 @@ from __future__ import annotations from typing import Any -from urllib.request import Request, urlopen def _get_gcp_response(resource: str, timeout: float = 5) -> dict[str, Any]: + from urllib.request import Request, urlopen + url = "http://metadata/computeMetadata/v1/instance/service-accounts/default/identity" url += f"?audience={resource}" headers = {"Metadata-Flavor": "Google"} diff --git a/pymongo/_lazy_import.py b/pymongo/_lazy_import.py deleted file mode 100644 index 6e6d59a028..0000000000 --- a/pymongo/_lazy_import.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright 2024-present MongoDB, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); you -# may not use this file except in compliance with the License. You -# may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -# implied. See the License for the specific language governing -# permissions and limitations under the License. 
-from __future__ import annotations - -import importlib.util -import sys -from types import ModuleType - - -def lazy_import(name: str) -> ModuleType: - """Lazily import a module by name - - From https://docs.python.org/3/library/importlib.html#implementing-lazy-imports - """ - try: - spec = importlib.util.find_spec(name) - except ValueError: - raise ModuleNotFoundError(name=name) from None - if spec is None: - raise ModuleNotFoundError(name=name) - assert spec is not None - loader = importlib.util.LazyLoader(spec.loader) # type:ignore[arg-type] - spec.loader = loader - module = importlib.util.module_from_spec(spec) - sys.modules[name] = module - loader.exec_module(module) - return module diff --git a/pymongo/_version.py b/pymongo/_version.py index de8933e30c..b89e98d5be 100644 --- a/pymongo/_version.py +++ b/pymongo/_version.py @@ -15,16 +15,29 @@ """Current version of PyMongo.""" from __future__ import annotations -from typing import Tuple, Union +import re +from typing import List, Tuple, Union -version_tuple: Tuple[Union[int, str], ...] = (4, 7, 1) +__version__ = "4.8.0" -def get_version_string() -> str: - if isinstance(version_tuple[-1], str): - return ".".join(map(str, version_tuple[:-1])) + version_tuple[-1] - return ".".join(map(str, version_tuple)) +def get_version_tuple(version: str) -> Tuple[Union[int, str], ...]: + pattern = r"(?P<major>\d+).(?P<minor>\d+).(?P<patch>\d+)(?P<rest>.*)" + match = re.match(pattern, version) + if match: + parts: List[Union[int, str]] = [int(match[part]) for part in ["major", "minor", "patch"]] + if match["rest"]: + parts.append(match["rest"]) + elif re.match(r"\d+.\d+", version): + parts = [int(part) for part in version.split(".")] + else: + raise ValueError("Could not parse version") + return tuple(parts) -__version__: str = get_version_string() +version_tuple = get_version_tuple(__version__) version = __version__ + + +def get_version_string() -> str: + return __version__ diff --git a/pymongo/auth_aws.py b/pymongo/auth_aws.py index 0d253cea13..042eee5a73 100644 --- a/pymongo/auth_aws.py +++ b/pymongo/auth_aws.py @@ -15,15 +15,6 @@ """MONGODB-AWS Authentication helpers.""" from __future__ import annotations -from pymongo._lazy_import import lazy_import - -try: - pymongo_auth_aws = lazy_import("pymongo_auth_aws") - _HAVE_MONGODB_AWS = True -except ImportError: - _HAVE_MONGODB_AWS = False - - from typing import TYPE_CHECKING, Any, Mapping, Type import bson @@ -38,11 +29,13 @@ def _authenticate_aws(credentials: MongoCredential, conn: Connection) -> None: """Authenticate using MONGODB-AWS.""" - if not _HAVE_MONGODB_AWS: + try: + import pymongo_auth_aws # type:ignore[import] + except ImportError as e: raise ConfigurationError( "MONGODB-AWS authentication requires pymongo-auth-aws: " "install with: python -m pip install 'pymongo[aws]'" - ) + ) from e # Delayed import.
from pymongo_auth_aws.auth import ( # type:ignore[import] diff --git a/pymongo/change_stream.py b/pymongo/change_stream.py index dc2f6bf2c5..300bd88e92 100644 --- a/pymongo/change_stream.py +++ b/pymongo/change_stream.py @@ -179,8 +179,7 @@ def _change_stream_options(self) -> dict[str, Any]: options["startAfter"] = resume_token else: options["resumeAfter"] = resume_token - - if self._start_at_operation_time is not None: + elif self._start_at_operation_time is not None: options["startAtOperationTime"] = self._start_at_operation_time if self._show_expanded_events: diff --git a/pymongo/client_options.py b/pymongo/client_options.py index 60332605a3..9c745b11ef 100644 --- a/pymongo/client_options.py +++ b/pymongo/client_options.py @@ -19,7 +19,6 @@ from bson.codec_options import _parse_codec_options from pymongo import common -from pymongo.auth import MongoCredential, _build_credentials_tuple from pymongo.compression_support import CompressionSettings from pymongo.errors import ConfigurationError from pymongo.monitoring import _EventListener, _EventListeners @@ -36,6 +35,7 @@ if TYPE_CHECKING: from bson.codec_options import CodecOptions + from pymongo.auth import MongoCredential from pymongo.encryption_options import AutoEncryptionOpts from pymongo.pyopenssl_context import SSLContext from pymongo.topology_description import _ServerSelector @@ -48,6 +48,8 @@ def _parse_credentials( mechanism = options.get("authmechanism", "DEFAULT" if username else None) source = options.get("authsource") if username or mechanism: + from pymongo.auth import _build_credentials_tuple + return _build_credentials_tuple(mechanism, source, username, password, options, database) return None diff --git a/pymongo/client_session.py b/pymongo/client_session.py index 3efc624c04..7dd1996afd 100644 --- a/pymongo/client_session.py +++ b/pymongo/client_session.py @@ -515,9 +515,6 @@ def end_session(self) -> None: It is an error to use the session after the session has ended. """ - self._end_session(lock=True) - - def _end_session(self, lock: bool) -> None: if self._server_session is not None: try: if self.in_transaction: @@ -526,7 +523,7 @@ def _end_session(self, lock: bool) -> None: # is in the committed state when the session is discarded. self._unpin() finally: - self._client._return_server_session(self._server_session, lock) + self._client._return_server_session(self._server_session) self._server_session = None def _check_ended(self) -> None: @@ -537,7 +534,7 @@ def __enter__(self) -> ClientSession: return self def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None: - self._end_session(lock=True) + self.end_session() @property def client(self) -> MongoClient: @@ -1097,7 +1094,7 @@ def inc_transaction_id(self) -> None: class _ServerSessionPool(collections.deque): """Pool of _ServerSession objects. - This class is not thread-safe, access it while holding the Topology lock. + This class is thread-safe. """ def __init__(self, *args: Any, **kwargs: Any): @@ -1110,8 +1107,11 @@ def reset(self) -> None: def pop_all(self) -> list[_ServerSession]: ids = [] - while self: - ids.append(self.pop().session_id) + while True: + try: + ids.append(self.pop().session_id) + except IndexError: + break return ids def get_server_session(self, session_timeout_minutes: Optional[int]) -> _ServerSession: @@ -1123,23 +1123,17 @@ def get_server_session(self, session_timeout_minutes: Optional[int]) -> _ServerS self._clear_stale(session_timeout_minutes) # The most recently used sessions are on the left. 
- while self: - s = self.popleft() + while True: + try: + s = self.popleft() + except IndexError: + break if not s.timed_out(session_timeout_minutes): return s return _ServerSession(self.generation) - def return_server_session( - self, server_session: _ServerSession, session_timeout_minutes: Optional[int] - ) -> None: - if session_timeout_minutes is not None: - self._clear_stale(session_timeout_minutes) - if server_session.timed_out(session_timeout_minutes): - return - self.return_server_session_no_lock(server_session) - - def return_server_session_no_lock(self, server_session: _ServerSession) -> None: + def return_server_session(self, server_session: _ServerSession) -> None: # Discard sessions from an old pool to avoid duplicate sessions in the # child process after a fork. if server_session.generation == self.generation and not server_session.dirty: @@ -1147,9 +1141,12 @@ def return_server_session_no_lock(self, server_session: _ServerSession) -> None: def _clear_stale(self, session_timeout_minutes: Optional[int]) -> None: # Clear stale sessions. The least recently used are on the right. - while self: - if self[-1].timed_out(session_timeout_minutes): - self.pop() - else: + while True: + try: + s = self.pop() + except IndexError: + break + if not s.timed_out(session_timeout_minutes): + self.append(s) # The remaining sessions also haven't timed out. break diff --git a/pymongo/command_cursor.py b/pymongo/command_cursor.py index 0411a45abe..6d48a87824 100644 --- a/pymongo/command_cursor.py +++ b/pymongo/command_cursor.py @@ -73,7 +73,7 @@ def __init__( self.__killed = self.__id == 0 self.__comment = comment if self.__killed: - self.__end_session(True) + self.__end_session() if "ns" in cursor_info: # noqa: SIM401 self.__ns = cursor_info["ns"] @@ -112,9 +112,9 @@ def __die(self, synchronous: bool = False) -> None: self.__session = None self.__sock_mgr = None - def __end_session(self, synchronous: bool) -> None: + def __end_session(self) -> None: if self.__session and not self.__explicit_session: - self.__session._end_session(lock=synchronous) + self.__session.end_session() self.__session = None def close(self) -> None: diff --git a/pymongo/common.py b/pymongo/common.py index 7f1245b7d3..57560a7b0d 100644 --- a/pymongo/common.py +++ b/pymongo/common.py @@ -40,8 +40,6 @@ from bson.binary import UuidRepresentation from bson.codec_options import CodecOptions, DatetimeConversion, TypeRegistry from bson.raw_bson import RawBSONDocument -from pymongo.auth import MECHANISMS -from pymongo.auth_oidc import OIDCCallback from pymongo.compression_support import ( validate_compressors, validate_zlib_compression_level, @@ -380,6 +378,8 @@ def validate_read_preference_mode(dummy: Any, value: Any) -> _ServerMode: def validate_auth_mechanism(option: str, value: Any) -> str: """Validate the authMechanism URI option.""" + from pymongo.auth import MECHANISMS + if value not in MECHANISMS: raise ValueError(f"{option} must be in {tuple(MECHANISMS)}") return value @@ -444,6 +444,8 @@ def validate_auth_mechanism_properties(option: str, value: Any) -> dict[str, Uni elif key in ["ALLOWED_HOSTS"] and isinstance(value, list): props[key] = value elif key in ["OIDC_CALLBACK", "OIDC_HUMAN_CALLBACK"]: + from pymongo.auth_oidc import OIDCCallback + if not isinstance(value, OIDCCallback): raise ValueError("callback must be an OIDCCallback object") props[key] = value @@ -452,8 +454,11 @@ def validate_auth_mechanism_properties(option: str, value: Any) -> dict[str, Uni return props value = validate_string(option, value) + value = 
unquote_plus(value) for opt in value.split(","): key, _, val = opt.partition(":") + if not val: + raise ValueError("Malformed auth mechanism properties") if key not in _MECHANISM_PROPS: # Try not to leak the token. if "AWS_SESSION_TOKEN" in key: @@ -471,7 +476,7 @@ def validate_auth_mechanism_properties(option: str, value: Any) -> dict[str, Uni if key == "CANONICALIZE_HOST_NAME": props[key] = validate_boolean_or_string(key, val) else: - props[key] = unquote_plus(val) + props[key] = val return props diff --git a/pymongo/compression_support.py b/pymongo/compression_support.py index 7daad21046..2f155352d2 100644 --- a/pymongo/compression_support.py +++ b/pymongo/compression_support.py @@ -16,34 +16,39 @@ import warnings from typing import Any, Iterable, Optional, Union -from pymongo._lazy_import import lazy_import from pymongo.hello import HelloCompat -from pymongo.monitoring import _SENSITIVE_COMMANDS +from pymongo.helpers import _SENSITIVE_COMMANDS -try: - snappy = lazy_import("snappy") - _HAVE_SNAPPY = True -except ImportError: - # python-snappy isn't available. - _HAVE_SNAPPY = False +_SUPPORTED_COMPRESSORS = {"snappy", "zlib", "zstd"} +_NO_COMPRESSION = {HelloCompat.CMD, HelloCompat.LEGACY_CMD} +_NO_COMPRESSION.update(_SENSITIVE_COMMANDS) -try: - zlib = lazy_import("zlib") - _HAVE_ZLIB = True -except ImportError: - # Python built without zlib support. - _HAVE_ZLIB = False +def _have_snappy() -> bool: + try: + import snappy # type:ignore[import] # noqa: F401 -try: - zstandard = lazy_import("zstandard") - _HAVE_ZSTD = True -except ImportError: - _HAVE_ZSTD = False + return True + except ImportError: + return False -_SUPPORTED_COMPRESSORS = {"snappy", "zlib", "zstd"} -_NO_COMPRESSION = {HelloCompat.CMD, HelloCompat.LEGACY_CMD} -_NO_COMPRESSION.update(_SENSITIVE_COMMANDS) + +def _have_zlib() -> bool: + try: + import zlib # noqa: F401 + + return True + except ImportError: + return False + + +def _have_zstd() -> bool: + try: + import zstandard # noqa: F401 + + return True + except ImportError: + return False def validate_compressors(dummy: Any, value: Union[str, Iterable[str]]) -> list[str]: @@ -58,21 +63,21 @@ def validate_compressors(dummy: Any, value: Union[str, Iterable[str]]) -> list[s if compressor not in _SUPPORTED_COMPRESSORS: compressors.remove(compressor) warnings.warn(f"Unsupported compressor: {compressor}", stacklevel=2) - elif compressor == "snappy" and not _HAVE_SNAPPY: + elif compressor == "snappy" and not _have_snappy(): compressors.remove(compressor) warnings.warn( "Wire protocol compression with snappy is not available. " "You must install the python-snappy module for snappy support.", stacklevel=2, ) - elif compressor == "zlib" and not _HAVE_ZLIB: + elif compressor == "zlib" and not _have_zlib(): compressors.remove(compressor) warnings.warn( "Wire protocol compression with zlib is not available. " "The zlib module is not available.", stacklevel=2, ) - elif compressor == "zstd" and not _HAVE_ZSTD: + elif compressor == "zstd" and not _have_zstd(): compressors.remove(compressor) warnings.warn( "Wire protocol compression with zstandard is not available. " @@ -117,6 +122,8 @@ class SnappyContext: @staticmethod def compress(data: bytes) -> bytes: + import snappy + return snappy.compress(data) @@ -127,6 +134,8 @@ def __init__(self, level: int): self.level = level def compress(self, data: bytes) -> bytes: + import zlib + return zlib.compress(data, self.level) @@ -137,6 +146,8 @@ class ZstdContext: def compress(data: bytes) -> bytes: # ZstdCompressor is not thread safe. 
# TODO: Use a pool? + import zstandard + return zstandard.ZstdCompressor().compress(data) @@ -146,12 +157,18 @@ def decompress(data: bytes, compressor_id: int) -> bytes: # https://github.com/andrix/python-snappy/issues/65 # This only matters when data is a memoryview since # id(bytes(data)) == id(data) when data is a bytes. + import snappy + return snappy.uncompress(bytes(data)) elif compressor_id == ZlibContext.compressor_id: + import zlib + return zlib.decompress(data) elif compressor_id == ZstdContext.compressor_id: # ZstdDecompressor is not thread safe. # TODO: Use a pool? + import zstandard + return zstandard.ZstdDecompressor().decompress(data) else: raise ValueError("Unknown compressorId %d" % (compressor_id,)) diff --git a/pymongo/helpers.py b/pymongo/helpers.py index 916d78a33b..080c3204a4 100644 --- a/pymongo/helpers.py +++ b/pymongo/helpers.py @@ -93,6 +93,21 @@ # Server code raised when authentication fails. _AUTHENTICATION_FAILURE_CODE: int = 18 +# Note - to avoid bugs from forgetting which if these is all lowercase and +# which are camelCase, and at the same time avoid having to add a test for +# every command, use all lowercase here and test against command_name.lower(). +_SENSITIVE_COMMANDS: set = { + "authenticate", + "saslstart", + "saslcontinue", + "getnonce", + "createuser", + "updateuser", + "copydbgetnonce", + "copydbsaslstart", + "copydb", +} + def _gen_index_name(keys: _IndexList) -> str: """Generate an index name from the set of fields it is over.""" diff --git a/pymongo/mongo_client.py b/pymongo/mongo_client.py index f2076b0877..89d61500ca 100644 --- a/pymongo/mongo_client.py +++ b/pymongo/mongo_client.py @@ -862,6 +862,7 @@ def __init__( server_monitoring_mode=options.server_monitoring_mode, ) + self._opened = False self._init_background() if connect: @@ -903,10 +904,13 @@ def target() -> bool: # this closure. When the client is freed, stop the executor soon. self_ref: Any = weakref.ref(self, executor.close) self._kill_cursors_executor = executor + self._opened = False def _after_fork(self) -> None: """Resets topology in a child after successfully forking.""" self._init_background(self._topology._pid) + # Reset the session pool to avoid duplicate sessions in the child process. + self._topology._session_pool.reset() def _duplicate(self, **kwargs: Any) -> MongoClient: args = self.__init_kwargs.copy() @@ -1243,9 +1247,11 @@ def _get_topology(self) -> Topology: If this client was created with "connect=False", calling _get_topology launches the connection process in the background. """ - self._topology.open() - with self.__lock: - self._kill_cursors_executor.open() + if not self._opened: + self._topology.open() + with self.__lock: + self._kill_cursors_executor.open() + self._opened = True return self._topology @contextlib.contextmanager @@ -1344,8 +1350,9 @@ def _conn_from_server( # always send primaryPreferred when directly connected to a repl set # member. # Thread safe: if the type is single it cannot change. - topology = self._get_topology() - single = topology.description.topology_type == TOPOLOGY_TYPE.Single + # NOTE: We already opened the Topology when selecting a server so there's no need + # to call _get_topology() again. 
+ single = self._topology.description.topology_type == TOPOLOGY_TYPE.Single with self._checkout(server, session) as conn: if single: @@ -1365,7 +1372,6 @@ def _conn_for_reads( operation: str, ) -> ContextManager[tuple[Connection, _ServerMode]]: assert read_preference is not None, "read_preference must not be None" - _ = self._get_topology() server = self._select_server(read_preference, session, operation) return self._conn_from_server(read_preference, server, session) @@ -1679,7 +1685,7 @@ def _cleanup_cursor( if cursor_id or conn_mgr: self._close_cursor_soon(cursor_id, address, conn_mgr) if session and not explicit_session: - session._end_session(lock=locks_allowed) + session.end_session() def _close_cursor_soon( self, @@ -1838,12 +1844,12 @@ def start_session( ) def _return_server_session( - self, server_session: Union[_ServerSession, _EmptyServerSession], lock: bool + self, server_session: Union[_ServerSession, _EmptyServerSession] ) -> None: """Internal: return a _ServerSession to the pool.""" if isinstance(server_session, _EmptyServerSession): return None - return self._topology.return_server_session(server_session, lock) + return self._topology.return_server_session(server_session) def _ensure_session(self, session: Optional[ClientSession] = None) -> Optional[ClientSession]: """If provided session is None, lend a temporary session.""" diff --git a/pymongo/monitoring.py b/pymongo/monitoring.py index aff11a9f42..896a747e72 100644 --- a/pymongo/monitoring.py +++ b/pymongo/monitoring.py @@ -191,7 +191,7 @@ def connection_checked_in(self, event): from bson.objectid import ObjectId from pymongo.hello import Hello, HelloCompat -from pymongo.helpers import _handle_exception +from pymongo.helpers import _SENSITIVE_COMMANDS, _handle_exception from pymongo.typings import _Address, _DocumentOut if TYPE_CHECKING: @@ -507,22 +507,6 @@ def register(listener: _EventListener) -> None: _LISTENERS.cmap_listeners.append(listener) -# Note - to avoid bugs from forgetting which if these is all lowercase and -# which are camelCase, and at the same time avoid having to add a test for -# every command, use all lowercase here and test against command_name.lower(). -_SENSITIVE_COMMANDS: set = { - "authenticate", - "saslstart", - "saslcontinue", - "getnonce", - "createuser", - "updateuser", - "copydbgetnonce", - "copydbsaslstart", - "copydb", -} - - # The "hello" command is also deemed sensitive when attempting speculative # authentication. def _is_speculative_authenticate(command_name: str, doc: Mapping[str, Any]) -> bool: diff --git a/pymongo/pool.py b/pymongo/pool.py index 6a8cb54b90..379127deee 100644 --- a/pymongo/pool.py +++ b/pymongo/pool.py @@ -41,7 +41,7 @@ import bson from bson import DEFAULT_CODEC_OPTIONS -from pymongo import __version__, _csot, auth, helpers +from pymongo import __version__, _csot, helpers from pymongo.client_session import _validate_session_write_concern from pymongo.common import ( MAX_BSON_SIZE, @@ -211,13 +211,14 @@ def _set_keepalive_times(sock: socket.socket) -> None: "version": platform.mac_ver()[0], } elif sys.platform == "win32": + _ver = sys.getwindowsversion() _METADATA["os"] = { - "type": platform.system(), - # "Windows XP", "Windows 7", "Windows 10", etc. - "name": " ".join((platform.system(), platform.release())), - "architecture": platform.machine(), - # Windows patch level (e.g. 5.1.2600-SP3) - "version": "-".join(platform.win32_ver()[1:3]), + "type": "Windows", + "name": "Windows", + # Avoid using platform calls, see PYTHON-4455. 
+ "architecture": os.environ.get("PROCESSOR_ARCHITECTURE") or platform.machine(), + # Windows patch level (e.g. 10.0.17763-SP0). + "version": ".".join(map(str, _ver[:3])) + f"-SP{_ver[-1] or '0'}", } elif sys.platform.startswith("java"): _name, _ver, _arch = platform.java_ver()[-1] @@ -538,7 +539,7 @@ def __init__( # 'name': 'PyMongo|MyDriver', # 'version': '4.2.0|1.2.3', # }, - # 'platform': 'CPython 3.7.0|MyPlatform' + # 'platform': 'CPython 3.8.0|MyPlatform' # } if driver: if driver.name: @@ -733,6 +734,7 @@ def __init__( self.op_msg_enabled = False self.listeners = pool.opts._event_listeners self.enabled_for_cmap = pool.enabled_for_cmap + self.enabled_for_logging = pool.enabled_for_logging self.compression_settings = pool.opts._compression_settings self.compression_context: Union[SnappyContext, ZlibContext, ZstdContext, None] = None self.socket_checker: SocketChecker = SocketChecker() @@ -859,6 +861,8 @@ def _hello( if creds: if creds.mechanism == "DEFAULT" and creds.username: cmd["saslSupportedMechs"] = creds.source + "." + creds.username + from pymongo import auth + auth_ctx = auth._AuthContext.from_credentials(creds, self.address) if auth_ctx: speculative_authenticate = auth_ctx.speculate_command() @@ -1090,22 +1094,24 @@ def authenticate(self, reauthenticate: bool = False) -> None: if not self.ready: creds = self.opts._credentials if creds: + from pymongo import auth + auth.authenticate(creds, self, reauthenticate=reauthenticate) self.ready = True + duration = time.monotonic() - self.creation_time if self.enabled_for_cmap: assert self.listeners is not None - duration = time.monotonic() - self.creation_time self.listeners.publish_connection_ready(self.address, self.id, duration) - if _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): - _debug_log( - _CONNECTION_LOGGER, - clientId=self._client_id, - message=_ConnectionStatusMessage.CONN_READY, - serverHost=self.address[0], - serverPort=self.address[1], - driverConnectionId=self.id, - durationMS=duration, - ) + if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): + _debug_log( + _CONNECTION_LOGGER, + clientId=self._client_id, + message=_ConnectionStatusMessage.CONN_READY, + serverHost=self.address[0], + serverPort=self.address[1], + driverConnectionId=self.id, + durationMS=duration, + ) def validate_session( self, client: Optional[MongoClient], session: Optional[ClientSession] @@ -1123,10 +1129,11 @@ def close_conn(self, reason: Optional[str]) -> None: if self.closed: return self._close_conn() - if reason and self.enabled_for_cmap: - assert self.listeners is not None - self.listeners.publish_connection_closed(self.address, self.id, reason) - if _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): + if reason: + if self.enabled_for_cmap: + assert self.listeners is not None + self.listeners.publish_connection_closed(self.address, self.id, reason) + if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _CONNECTION_LOGGER, clientId=self._client_id, @@ -1436,6 +1443,7 @@ def __init__( and self.opts._event_listeners is not None and self.opts._event_listeners.enabled_for_cmap ) + self.enabled_for_logging = self.handshake # The first portion of the wait queue. 
# Enforces: maxPoolSize @@ -1457,15 +1465,15 @@ def __init__( self.opts._event_listeners.publish_pool_created( self.address, self.opts.non_default_options ) - if _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): - _debug_log( - _CONNECTION_LOGGER, - clientId=self._client_id, - message=_ConnectionStatusMessage.POOL_CREATED, - serverHost=self.address[0], - serverPort=self.address[1], - **self.opts.non_default_options, - ) + if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): + _debug_log( + _CONNECTION_LOGGER, + clientId=self._client_id, + message=_ConnectionStatusMessage.POOL_CREATED, + serverHost=self.address[0], + serverPort=self.address[1], + **self.opts.non_default_options, + ) # Similar to active_sockets but includes threads in the wait queue. self.operation_count: int = 0 # Retain references to pinned connections to prevent the CPython GC @@ -1483,14 +1491,14 @@ def ready(self) -> None: if self.enabled_for_cmap: assert self.opts._event_listeners is not None self.opts._event_listeners.publish_pool_ready(self.address) - if _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): - _debug_log( - _CONNECTION_LOGGER, - clientId=self._client_id, - message=_ConnectionStatusMessage.POOL_READY, - serverHost=self.address[0], - serverPort=self.address[1], - ) + if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): + _debug_log( + _CONNECTION_LOGGER, + clientId=self._client_id, + message=_ConnectionStatusMessage.POOL_READY, + serverHost=self.address[0], + serverPort=self.address[1], + ) @property def closed(self) -> bool: @@ -1548,23 +1556,24 @@ def _reset( if self.enabled_for_cmap: assert listeners is not None listeners.publish_pool_closed(self.address) - if _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): - _debug_log( - _CONNECTION_LOGGER, - clientId=self._client_id, - message=_ConnectionStatusMessage.POOL_CLOSED, - serverHost=self.address[0], - serverPort=self.address[1], - ) - else: - if old_state != PoolState.PAUSED and self.enabled_for_cmap: - assert listeners is not None - listeners.publish_pool_cleared( - self.address, - service_id=service_id, - interrupt_connections=interrupt_connections, + if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): + _debug_log( + _CONNECTION_LOGGER, + clientId=self._client_id, + message=_ConnectionStatusMessage.POOL_CLOSED, + serverHost=self.address[0], + serverPort=self.address[1], ) - if _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): + else: + if old_state != PoolState.PAUSED: + if self.enabled_for_cmap: + assert listeners is not None + listeners.publish_pool_cleared( + self.address, + service_id=service_id, + interrupt_connections=interrupt_connections, + ) + if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _CONNECTION_LOGGER, clientId=self._client_id, @@ -1672,15 +1681,15 @@ def connect(self, handler: Optional[_MongoClientErrorHandler] = None) -> Connect if self.enabled_for_cmap: assert listeners is not None listeners.publish_connection_created(self.address, conn_id) - if _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): - _debug_log( - _CONNECTION_LOGGER, - clientId=self._client_id, - message=_ConnectionStatusMessage.CONN_CREATED, - serverHost=self.address[0], - serverPort=self.address[1], - driverConnectionId=conn_id, - ) + if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): + _debug_log( + _CONNECTION_LOGGER, + clientId=self._client_id, + message=_ConnectionStatusMessage.CONN_CREATED, + serverHost=self.address[0], 
+ serverPort=self.address[1], + driverConnectionId=conn_id, + ) try: sock = _configured_socket(self.address, self.opts) @@ -1690,17 +1699,17 @@ def connect(self, handler: Optional[_MongoClientErrorHandler] = None) -> Connect listeners.publish_connection_closed( self.address, conn_id, ConnectionClosedReason.ERROR ) - if _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): - _debug_log( - _CONNECTION_LOGGER, - clientId=self._client_id, - message=_ConnectionStatusMessage.CONN_CLOSED, - serverHost=self.address[0], - serverPort=self.address[1], - driverConnectionId=conn_id, - reason=_verbose_connection_error_reason(ConnectionClosedReason.ERROR), - error=ConnectionClosedReason.ERROR, - ) + if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): + _debug_log( + _CONNECTION_LOGGER, + clientId=self._client_id, + message=_ConnectionStatusMessage.CONN_CLOSED, + serverHost=self.address[0], + serverPort=self.address[1], + driverConnectionId=conn_id, + reason=_verbose_connection_error_reason(ConnectionClosedReason.ERROR), + error=ConnectionClosedReason.ERROR, + ) if isinstance(error, (IOError, OSError, SSLError)): details = _get_timeout_details(self.opts) _raise_connection_failure(self.address, error, timeout_details=details) @@ -1746,31 +1755,31 @@ def checkout(self, handler: Optional[_MongoClientErrorHandler] = None) -> Iterat if self.enabled_for_cmap: assert listeners is not None listeners.publish_connection_check_out_started(self.address) - if _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): - _debug_log( - _CONNECTION_LOGGER, - clientId=self._client_id, - message=_ConnectionStatusMessage.CHECKOUT_STARTED, - serverHost=self.address[0], - serverPort=self.address[1], - ) + if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): + _debug_log( + _CONNECTION_LOGGER, + clientId=self._client_id, + message=_ConnectionStatusMessage.CHECKOUT_STARTED, + serverHost=self.address[0], + serverPort=self.address[1], + ) conn = self._get_conn(checkout_started_time, handler=handler) + duration = time.monotonic() - checkout_started_time if self.enabled_for_cmap: assert listeners is not None - duration = time.monotonic() - checkout_started_time listeners.publish_connection_checked_out(self.address, conn.id, duration) - if _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): - _debug_log( - _CONNECTION_LOGGER, - clientId=self._client_id, - message=_ConnectionStatusMessage.CHECKOUT_SUCCEEDED, - serverHost=self.address[0], - serverPort=self.address[1], - driverConnectionId=conn.id, - durationMS=duration, - ) + if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): + _debug_log( + _CONNECTION_LOGGER, + clientId=self._client_id, + message=_ConnectionStatusMessage.CHECKOUT_SUCCEEDED, + serverHost=self.address[0], + serverPort=self.address[1], + driverConnectionId=conn.id, + durationMS=duration, + ) try: with self.lock: self.active_contexts.add(conn.cancel_context) @@ -1802,13 +1811,14 @@ def checkout(self, handler: Optional[_MongoClientErrorHandler] = None) -> Iterat def _raise_if_not_ready(self, checkout_started_time: float, emit_event: bool) -> None: if self.state != PoolState.READY: - if self.enabled_for_cmap and emit_event: - assert self.opts._event_listeners is not None + if emit_event: duration = time.monotonic() - checkout_started_time - self.opts._event_listeners.publish_connection_check_out_failed( - self.address, ConnectionCheckOutFailedReason.CONN_ERROR, duration - ) - if _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): + if self.enabled_for_cmap: + 
assert self.opts._event_listeners is not None + self.opts._event_listeners.publish_connection_check_out_failed( + self.address, ConnectionCheckOutFailedReason.CONN_ERROR, duration + ) + if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _CONNECTION_LOGGER, clientId=self._client_id, @@ -1836,23 +1846,23 @@ def _get_conn( self.reset_without_pause() if self.closed: + duration = time.monotonic() - checkout_started_time if self.enabled_for_cmap: assert self.opts._event_listeners is not None - duration = time.monotonic() - checkout_started_time self.opts._event_listeners.publish_connection_check_out_failed( self.address, ConnectionCheckOutFailedReason.POOL_CLOSED, duration ) - if _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): - _debug_log( - _CONNECTION_LOGGER, - clientId=self._client_id, - message=_ConnectionStatusMessage.CHECKOUT_FAILED, - serverHost=self.address[0], - serverPort=self.address[1], - reason="Connection pool was closed", - error=ConnectionCheckOutFailedReason.POOL_CLOSED, - durationMS=duration, - ) + if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): + _debug_log( + _CONNECTION_LOGGER, + clientId=self._client_id, + message=_ConnectionStatusMessage.CHECKOUT_FAILED, + serverHost=self.address[0], + serverPort=self.address[1], + reason="Connection pool was closed", + error=ConnectionCheckOutFailedReason.POOL_CLOSED, + durationMS=duration, + ) raise _PoolClosedError( "Attempted to check out a connection from closed connection pool" ) @@ -1928,13 +1938,14 @@ def _get_conn( self.active_sockets -= 1 self.size_cond.notify() - if self.enabled_for_cmap and not emitted_event: - assert self.opts._event_listeners is not None + if not emitted_event: duration = time.monotonic() - checkout_started_time - self.opts._event_listeners.publish_connection_check_out_failed( - self.address, ConnectionCheckOutFailedReason.CONN_ERROR, duration - ) - if _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): + if self.enabled_for_cmap: + assert self.opts._event_listeners is not None + self.opts._event_listeners.publish_connection_check_out_failed( + self.address, ConnectionCheckOutFailedReason.CONN_ERROR, duration + ) + if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): _debug_log( _CONNECTION_LOGGER, clientId=self._client_id, @@ -1967,15 +1978,15 @@ def checkin(self, conn: Connection) -> None: if self.enabled_for_cmap: assert listeners is not None listeners.publish_connection_checked_in(self.address, conn.id) - if _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): - _debug_log( - _CONNECTION_LOGGER, - clientId=self._client_id, - message=_ConnectionStatusMessage.CHECKEDIN, - serverHost=self.address[0], - serverPort=self.address[1], - driverConnectionId=conn.id, - ) + if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): + _debug_log( + _CONNECTION_LOGGER, + clientId=self._client_id, + message=_ConnectionStatusMessage.CHECKEDIN, + serverHost=self.address[0], + serverPort=self.address[1], + driverConnectionId=conn.id, + ) if self.pid != os.getpid(): self.reset_without_pause() else: @@ -1988,17 +1999,17 @@ def checkin(self, conn: Connection) -> None: listeners.publish_connection_closed( self.address, conn.id, ConnectionClosedReason.ERROR ) - if _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): - _debug_log( - _CONNECTION_LOGGER, - clientId=self._client_id, - message=_ConnectionStatusMessage.CONN_CLOSED, - serverHost=self.address[0], - serverPort=self.address[1], - driverConnectionId=conn.id, - 
reason=_verbose_connection_error_reason(ConnectionClosedReason.ERROR), - error=ConnectionClosedReason.ERROR, - ) + if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): + _debug_log( + _CONNECTION_LOGGER, + clientId=self._client_id, + message=_ConnectionStatusMessage.CONN_CLOSED, + serverHost=self.address[0], + serverPort=self.address[1], + driverConnectionId=conn.id, + reason=_verbose_connection_error_reason(ConnectionClosedReason.ERROR), + error=ConnectionClosedReason.ERROR, + ) else: with self.lock: # Hold the lock to ensure this section does not race with @@ -2060,23 +2071,23 @@ def _perished(self, conn: Connection) -> bool: def _raise_wait_queue_timeout(self, checkout_started_time: float) -> NoReturn: listeners = self.opts._event_listeners + duration = time.monotonic() - checkout_started_time if self.enabled_for_cmap: assert listeners is not None - duration = time.monotonic() - checkout_started_time listeners.publish_connection_check_out_failed( self.address, ConnectionCheckOutFailedReason.TIMEOUT, duration ) - if _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): - _debug_log( - _CONNECTION_LOGGER, - clientId=self._client_id, - message=_ConnectionStatusMessage.CHECKOUT_FAILED, - serverHost=self.address[0], - serverPort=self.address[1], - reason="Wait queue timeout elapsed without a connection becoming available", - error=ConnectionCheckOutFailedReason.TIMEOUT, - durationMS=duration, - ) + if self.enabled_for_logging and _CONNECTION_LOGGER.isEnabledFor(logging.DEBUG): + _debug_log( + _CONNECTION_LOGGER, + clientId=self._client_id, + message=_ConnectionStatusMessage.CHECKOUT_FAILED, + serverHost=self.address[0], + serverPort=self.address[1], + reason="Wait queue timeout elapsed without a connection becoming available", + error=ConnectionCheckOutFailedReason.TIMEOUT, + durationMS=duration, + ) timeout = _csot.get_timeout() or self.opts.wait_queue_timeout if self.opts.load_balanced: other_ops = self.active_sockets - self.ncursors - self.ntxns diff --git a/pymongo/pyopenssl_context.py b/pymongo/pyopenssl_context.py index fb00713553..b08588daff 100644 --- a/pymongo/pyopenssl_context.py +++ b/pymongo/pyopenssl_context.py @@ -25,10 +25,11 @@ from ipaddress import ip_address as _ip_address from typing import TYPE_CHECKING, Any, Callable, Optional, TypeVar, Union +import cryptography.x509 as x509 +import service_identity from OpenSSL import SSL as _SSL from OpenSSL import crypto as _crypto -from pymongo._lazy_import import lazy_import from pymongo.errors import ConfigurationError as _ConfigurationError from pymongo.errors import _CertificateError # type:ignore[attr-defined] from pymongo.ocsp_cache import _OCSPCache @@ -37,14 +38,9 @@ from pymongo.socket_checker import _errno_from_exception from pymongo.write_concern import validate_boolean -_x509 = lazy_import("cryptography.x509") -_service_identity = lazy_import("service_identity") -_service_identity_pyopenssl = lazy_import("service_identity.pyopenssl") - if TYPE_CHECKING: from ssl import VerifyMode - from cryptography.x509 import Certificate _T = TypeVar("_T") @@ -184,7 +180,7 @@ class _CallbackData: """Data class which is passed to the OCSP callback.""" def __init__(self) -> None: - self.trusted_ca_certs: Optional[list[Certificate]] = None + self.trusted_ca_certs: Optional[list[x509.Certificate]] = None self.check_ocsp_endpoint: Optional[bool] = None self.ocsp_response_cache = _OCSPCache() @@ -336,11 +332,12 @@ def _load_wincerts(self, store: str) -> None: """Attempt to load CA certs from Windows trust store.""" 
cert_store = self._ctx.get_cert_store() oid = _stdlibssl.Purpose.SERVER_AUTH.oid + for cert, encoding, trust in _stdlibssl.enum_certificates(store): # type: ignore if encoding == "x509_asn": if trust is True or oid in trust: cert_store.add_cert( - _crypto.X509.from_cryptography(_x509.load_der_x509_certificate(cert)) + _crypto.X509.from_cryptography(x509.load_der_x509_certificate(cert)) ) def load_default_certs(self) -> None: @@ -404,14 +401,16 @@ def wrap_socket( # XXX: Do this in a callback registered with # SSLContext.set_info_callback? See Twisted for an example. if self.check_hostname and server_hostname is not None: + from service_identity import pyopenssl + try: if _is_ip_address(server_hostname): - _service_identity_pyopenssl.verify_ip_address(ssl_conn, server_hostname) + pyopenssl.verify_ip_address(ssl_conn, server_hostname) else: - _service_identity_pyopenssl.verify_hostname(ssl_conn, server_hostname) - except ( - _service_identity.SICertificateError, - _service_identity.SIVerificationError, + pyopenssl.verify_hostname(ssl_conn, server_hostname) + except ( # type:ignore[misc] + service_identity.SICertificateError, + service_identity.SIVerificationError, ) as exc: raise _CertificateError(str(exc)) from None return ssl_conn diff --git a/pymongo/srv_resolver.py b/pymongo/srv_resolver.py index 4ee1b1f5b6..6f6cc285fa 100644 --- a/pymongo/srv_resolver.py +++ b/pymongo/srv_resolver.py @@ -17,17 +17,22 @@ import ipaddress import random -from typing import Any, Optional, Union +from typing import TYPE_CHECKING, Any, Optional, Union from pymongo.common import CONNECT_TIMEOUT from pymongo.errors import ConfigurationError -try: +if TYPE_CHECKING: from dns import resolver - _HAVE_DNSPYTHON = True -except ImportError: - _HAVE_DNSPYTHON = False + +def _have_dnspython() -> bool: + try: + import dns # noqa: F401 + + return True + except ImportError: + return False # dnspython can return bytes or str from various parts @@ -40,6 +45,8 @@ def maybe_decode(text: Union[str, bytes]) -> str: # PYTHON-2667 Lazily call dns.resolver methods for compatibility with eventlet. def _resolve(*args: Any, **kwargs: Any) -> resolver.Answer: + from dns import resolver + if hasattr(resolver, "resolve"): # dnspython >= 2 return resolver.resolve(*args, **kwargs) @@ -81,6 +88,8 @@ def __init__( raise ConfigurationError(_INVALID_HOST_MSG % (fqdn,)) def get_options(self) -> Optional[str]: + from dns import resolver + try: results = _resolve(self.__fqdn, "TXT", lifetime=self.__connect_timeout) except (resolver.NoAnswer, resolver.NXDOMAIN): diff --git a/pymongo/ssl_support.py b/pymongo/ssl_support.py index 849fbf7018..6a5dd278d3 100644 --- a/pymongo/ssl_support.py +++ b/pymongo/ssl_support.py @@ -15,6 +15,7 @@ """Support for SSL in PyMongo.""" from __future__ import annotations +import warnings from typing import Optional from pymongo.errors import ConfigurationError @@ -23,7 +24,17 @@ try: import pymongo.pyopenssl_context as _ssl -except ImportError: +except (ImportError, AttributeError) as exc: + if isinstance(exc, AttributeError): + warnings.warn( + "Failed to use the installed version of PyOpenSSL. " + "Falling back to stdlib ssl, disabling OCSP support. " + "This is likely caused by incompatible versions " + "of PyOpenSSL < 23.2.0 and cryptography >= 42.0.0. 
" + "Try updating PyOpenSSL >= 23.2.0 to enable OCSP.", + UserWarning, + stacklevel=2, + ) try: import pymongo.ssl_context as _ssl # type: ignore[no-redef] except ImportError: diff --git a/pymongo/topology.py b/pymongo/topology.py index 99adcae620..ea623cd1b4 100644 --- a/pymongo/topology.py +++ b/pymongo/topology.py @@ -44,7 +44,6 @@ from pymongo.logger import ( _SERVER_SELECTION_LOGGER, _debug_log, - _info_log, _ServerSelectionStatusMessage, ) from pymongo.monitor import SrvMonitor @@ -306,7 +305,7 @@ def _select_servers_loop( ) if not logged_waiting: - _info_log( + _debug_log( _SERVER_SELECTION_LOGGER, message=_ServerSelectionStatusMessage.WAITING, selector=selector, @@ -670,23 +669,14 @@ def description(self) -> TopologyDescription: def pop_all_sessions(self) -> list[_ServerSession]: """Pop all session ids from the pool.""" - with self._lock: - return self._session_pool.pop_all() + return self._session_pool.pop_all() def get_server_session(self, session_timeout_minutes: Optional[int]) -> _ServerSession: """Start or resume a server session, or raise ConfigurationError.""" - with self._lock: - return self._session_pool.get_server_session(session_timeout_minutes) + return self._session_pool.get_server_session(session_timeout_minutes) - def return_server_session(self, server_session: _ServerSession, lock: bool) -> None: - if lock: - with self._lock: - self._session_pool.return_server_session( - server_session, self._description.logical_session_timeout_minutes - ) - else: - # Called from a __del__ method, can't use a lock. - self._session_pool.return_server_session_no_lock(server_session) + def return_server_session(self, server_session: _ServerSession) -> None: + self._session_pool.return_server_session(server_session) def _new_selection(self) -> Selection: """A Selection object, initially including all known servers. diff --git a/pymongo/uri_parser.py b/pymongo/uri_parser.py index 7f4ef57f9c..4ebd3008c3 100644 --- a/pymongo/uri_parser.py +++ b/pymongo/uri_parser.py @@ -40,7 +40,7 @@ get_validated_options, ) from pymongo.errors import ConfigurationError, InvalidURI -from pymongo.srv_resolver import _HAVE_DNSPYTHON, _SrvResolver +from pymongo.srv_resolver import _have_dnspython, _SrvResolver from pymongo.typings import _Address if TYPE_CHECKING: @@ -472,7 +472,7 @@ def parse_uri( is_srv = False scheme_free = uri[SCHEME_LEN:] elif uri.startswith(SRV_SCHEME): - if not _HAVE_DNSPYTHON: + if not _have_dnspython(): python_path = sys.executable or "python" raise ConfigurationError( 'The "dnspython" module must be ' @@ -494,16 +494,11 @@ def parse_uri( collection = None options = _CaseInsensitiveDictionary() - host_part, _, path_part = scheme_free.partition("/") - if not host_part: - host_part = path_part - path_part = "" - - if path_part: - dbase, _, opts = path_part.partition("?") + host_plus_db_part, _, opts = scheme_free.partition("?") + if "/" in host_plus_db_part: + host_part, _, dbase = host_plus_db_part.partition("/") else: - # There was no slash in scheme_free, check for a sole "?". 
- host_part, _, opts = host_part.partition("?") + host_part = host_plus_db_part if dbase: dbase = unquote_plus(dbase) diff --git a/pyproject.toml b/pyproject.toml index 4520ba08c9..d208f6a439 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [build-system] -requires = ["setuptools>=63.0"] -build-backend = "setuptools.build_meta" +requires = ["hatchling>1.24","setuptools>=65.0","hatch-requirements-txt>=0.4.1"] +build-backend = "hatchling.build" [project] name = "pymongo" @@ -8,7 +8,7 @@ dynamic = ["version", "dependencies", "optional-dependencies"] description = "Python driver for MongoDB " readme = "README.md" license = {file="LICENSE"} -requires-python = ">=3.7" +requires-python = ">=3.8" authors = [ { name = "The MongoDB Python Team" }, ] @@ -30,7 +30,6 @@ classifiers = [ "Programming Language :: Python :: Implementation :: PyPy", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3 :: Only", - "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", @@ -46,16 +45,28 @@ Documentation = "https://pymongo.readthedocs.io" Source = "https://github.com/mongodb/mongo-python-driver" Tracker = "https://jira.mongodb.org/projects/PYTHON/issues" -[tool.setuptools.dynamic] -version = {attr = "pymongo._version.__version__"} +# Used to call hatch_build.py +[tool.hatch.build.hooks.custom] -[tool.setuptools.packages.find] -include = ["bson","gridfs", "pymongo"] +[tool.hatch.version] +path = "pymongo/_version.py" +validate-bump = false -[tool.setuptools.package-data] -bson=["py.typed", "*.pyi"] -pymongo=["py.typed", "*.pyi"] -gridfs=["py.typed", "*.pyi"] +[tool.hatch.build.targets.wheel] +packages = ["bson","gridfs", "pymongo"] + +[tool.hatch.metadata.hooks.requirements_txt] +files = ["requirements.txt"] + +[tool.hatch.metadata.hooks.requirements_txt.optional-dependencies] +aws = ["requirements/aws.txt"] +docs = ["requirements/docs.txt"] +encryption = ["requirements/encryption.txt"] +gssapi = ["requirements/gssapi.txt"] +ocsp = ["requirements/ocsp.txt"] +snappy = ["requirements/snappy.txt"] +test = ["requirements/test.txt"] +zstd = ["requirements/zstd.txt"] [tool.pytest.ini_options] minversion = "7" @@ -169,6 +180,7 @@ dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?)|dummy.*)$" "UP031", "F401", "B023", "F811"] "tools/*.py" = ["T201"] "green_framework_test.py" = ["T201"] +"hatch_build.py" = ["S"] [tool.coverage.run] branch = true diff --git a/sbom.json b/sbom.json new file mode 100644 index 0000000000..95b362f836 --- /dev/null +++ b/sbom.json @@ -0,0 +1,11 @@ +{ + "metadata": { + "timestamp": "2024-06-10T18:55:17.710940+00:00" + }, + "components": [], + "serialNumber": "urn:uuid:a6c08d96-55e1-4cdb-945c-0e21ced83e34", + "version": 1, + "$schema": "http://cyclonedx.org/schema/bom-1.5.schema.json", + "bomFormat": "CycloneDX", + "specVersion": "1.5" +} diff --git a/setup.py b/setup.py index 599ea0e4a9..f371b3d75b 100644 --- a/setup.py +++ b/setup.py @@ -1,167 +1,8 @@ from __future__ import annotations -import os -import sys -import warnings - -# Hack to silence atexit traceback in some Python versions -try: - import multiprocessing # noqa: F401 -except ImportError: - pass - -from setuptools import setup -from setuptools.command.build_ext import build_ext -from setuptools.extension import Extension - - -class custom_build_ext(build_ext): - """Allow C extension building to fail. - - The C extension speeds up BSON encoding, but is not essential.
- """ - - warning_message = """ -******************************************************************** -WARNING: %s could not -be compiled. No C extensions are essential for PyMongo to run, -although they do result in significant speed improvements. -%s - -Please see the installation docs for solutions to build issues: - -https://pymongo.readthedocs.io/en/stable/installation.html - -Here are some hints for popular operating systems: - -If you are seeing this message on Linux you probably need to -install GCC and/or the Python development package for your -version of Python. - -Debian and Ubuntu users should issue the following command: - - $ sudo apt-get install build-essential python-dev - -Users of Red Hat based distributions (RHEL, CentOS, Amazon Linux, -Oracle Linux, Fedora, etc.) should issue the following command: - - $ sudo yum install gcc python-devel - -If you are seeing this message on Microsoft Windows please install -PyMongo using pip. Modern versions of pip will install PyMongo -from binary wheels available on pypi. If you must install from -source read the documentation here: - -https://pymongo.readthedocs.io/en/stable/installation.html#installing-from-source-on-windows - -If you are seeing this message on macOS / OSX please install PyMongo -using pip. Modern versions of pip will install PyMongo from binary -wheels available on pypi. If wheels are not available for your version -of macOS / OSX, or you must install from source read the documentation -here: - -https://pymongo.readthedocs.io/en/stable/installation.html#osx -******************************************************************** -""" - - def run(self): - try: - build_ext.run(self) - except Exception: - if os.environ.get("PYMONGO_C_EXT_MUST_BUILD"): - raise - e = sys.exc_info()[1] - sys.stdout.write("%s\n" % str(e)) - warnings.warn( - self.warning_message - % ( - "Extension modules", - "There was an issue with your platform configuration - see above.", - ), - stacklevel=2, - ) - - def build_extension(self, ext): - name = ext.name - try: - build_ext.build_extension(self, ext) - except Exception: - if os.environ.get("PYMONGO_C_EXT_MUST_BUILD"): - raise - e = sys.exc_info()[1] - sys.stdout.write("%s\n" % str(e)) - warnings.warn( - self.warning_message - % ( - "The %s extension module" % (name,), # noqa: UP031 - "The output above this warning shows how the compilation failed.", - ), - stacklevel=2, - ) - - -ext_modules = [ - Extension( - "bson._cbson", - include_dirs=["bson"], - sources=["bson/_cbsonmodule.c", "bson/time64.c", "bson/buffer.c"], - ), - Extension( - "pymongo._cmessage", - include_dirs=["bson"], - sources=[ - "pymongo/_cmessagemodule.c", - "bson/_cbsonmodule.c", - "bson/time64.c", - "bson/buffer.c", - ], - ), -] - - -if "--no_ext" in sys.argv or os.environ.get("NO_EXT"): - try: - sys.argv.remove("--no_ext") - except ValueError: - pass - ext_modules = [] -elif sys.platform.startswith("java") or sys.platform == "cli" or "PyPy" in sys.version: - sys.stdout.write( - """ -*****************************************************\n -The optional C extensions are currently not supported\n -by this python implementation.\n -*****************************************************\n -""" - ) - ext_modules = [] - - -def parse_reqs_file(fname): - with open(fname) as fid: - lines = [li.strip() for li in fid.readlines()] - return [li for li in lines if li and not li.startswith("#")] - - -dependencies = parse_reqs_file("requirements.txt") - -extras_require = dict( - aws=parse_reqs_file("requirements/aws.txt"), - 
encryption=parse_reqs_file("requirements/encryption.txt"), - gssapi=parse_reqs_file("requirements/gssapi.txt"), - ocsp=parse_reqs_file("requirements/ocsp.txt"), - snappy=parse_reqs_file("requirements/snappy.txt"), - # PYTHON-3423 Removed in 4.3 but kept here to avoid pip warnings. - srv=[], - tls=[], - # PYTHON-2133 Removed in 4.0 but kept here to avoid pip warnings. - zstd=parse_reqs_file("requirements/zstd.txt"), - test=parse_reqs_file("requirements/test.txt"), +msg = ( + "PyMongo>=4.8 no longer supports building via setup.py, use python -m pip install instead. If " + "this is an editable install (-e) please upgrade to pip>=21.3 first: python -m pip install --upgrade pip" ) -setup( - cmdclass={"build_ext": custom_build_ext}, - install_requires=dependencies, - extras_require=extras_require, - ext_modules=ext_modules, -) # type:ignore +raise RuntimeError(msg) diff --git a/test/__init__.py b/test/__init__.py index f9260be33c..c516838f47 100644 --- a/test/__init__.py +++ b/test/__init__.py @@ -277,6 +277,7 @@ def __init__(self): self.is_data_lake = False self.load_balancer = TEST_LOADBALANCER self.serverless = TEST_SERVERLESS + self._fips_enabled = None if self.load_balancer or self.serverless: self.default_client_options["loadBalanced"] = True if COMPRESSORS: @@ -352,8 +353,7 @@ def _init_client(self): if self.client is not None: # Return early when connected to dataLake as mongohoused does not # support the getCmdLineOpts command and is tested without TLS. - build_info: Any = self.client.admin.command("buildInfo") - if "dataLake" in build_info: + if os.environ.get("TEST_DATA_LAKE"): self.is_data_lake = True self.auth_enabled = True self.client = self._connect(host, port, username=db_user, password=db_pwd) @@ -524,6 +524,17 @@ def storage_engine(self): # Raised if self.server_status is None. 
return None + @property + def fips_enabled(self): + if self._fips_enabled is not None: + return self._fips_enabled + try: + subprocess.check_call(["fips-mode-setup", "--is-enabled"]) + self._fips_enabled = True + except (subprocess.SubprocessError, FileNotFoundError): + self._fips_enabled = False + return self._fips_enabled + def check_auth_type(self, auth_type): auth_mechs = self.server_parameters.get("authenticationMechanisms", []) return auth_type in auth_mechs @@ -671,6 +682,12 @@ def require_auth(self, func): lambda: self.auth_enabled, "Authentication is not enabled on the server", func=func ) + def require_no_fips(self, func): + """Run a test only if the host does not have FIPS enabled.""" + return self._require( + lambda: not self.fips_enabled, "Test cannot run on a FIPS-enabled host", func=func + ) + def require_no_auth(self, func): """Run a test only if the server is running without auth enabled.""" return self._require( diff --git a/test/auth/legacy/connection-string.json b/test/auth/legacy/connection-string.json index f8fe0aeb51..57fd9d4a11 100644 --- a/test/auth/legacy/connection-string.json +++ b/test/auth/legacy/connection-string.json @@ -559,7 +559,7 @@ }, { "description": "should handle a complicated url-encoded TOKEN_RESOURCE (MONGODB-OIDC)", - "uri": "mongodb://user@localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:azure,TOKEN_RESOURCE:abc%2Cd%25ef%3Ag%26hi", + "uri": "mongodb://user@localhost/?authMechanism=MONGODB-OIDC&authMechanismProperties=ENVIRONMENT:azure,TOKEN_RESOURCE:abcd%25ef%3Ag%26hi", "valid": true, "credential": { "username": "user", @@ -568,7 +568,7 @@ "mechanism": "MONGODB-OIDC", "mechanism_properties": { "ENVIRONMENT": "azure", - "TOKEN_RESOURCE": "abc,d%ef:g&hi" + "TOKEN_RESOURCE": "abcd%ef:g&hi" } } }, diff --git a/test/auth/unified/mongodb-oidc-no-retry.json b/test/auth/unified/mongodb-oidc-no-retry.json index 9dbe198270..0a8658455e 100644 --- a/test/auth/unified/mongodb-oidc-no-retry.json +++ b/test/auth/unified/mongodb-oidc-no-retry.json @@ -5,7 +5,8 @@ { "minServerVersion": "7.0", "auth": true, - "authMechanism": "MONGODB-OIDC" + "authMechanism": "MONGODB-OIDC", + "serverless": "forbid" } ], "createEntities": [ diff --git a/test/auth_oidc/test_auth_oidc.py b/test/auth_oidc/test_auth_oidc.py index 7ea565935d..c7614fa0c3 100644 --- a/test/auth_oidc/test_auth_oidc.py +++ b/test/auth_oidc/test_auth_oidc.py @@ -824,23 +824,7 @@ def fetch(self, a): # Close the client. client.close() - def test_2_4_oidc_callback_returns_invalid_data(self): - # Create a MongoClient configured with an OIDC callback that returns data not conforming to the OIDCCredential with extra fields. - class CustomCallback(OIDCCallback): - count = 0 - - def fetch(self, a): - self.count += 1 - return OIDCCallbackResult(access_token="bad value") - - client = self.create_client(request_cb=CustomCallback()) - # Perform a ``find`` operation that fails. - with self.assertRaises(OperationFailure): - client.test.test.find_one() - # Close the client. - client.close() - - def test_2_5_invalid_client_configuration_with_callback(self): + def test_2_4_invalid_client_configuration_with_callback(self): # Create a MongoClient configured with an OIDC callback and auth mechanism property ENVIRONMENT:test. 
request_cb = self.create_request_cb() props: Dict = {"OIDC_CALLBACK": request_cb, "ENVIRONMENT": "test"} @@ -848,6 +832,13 @@ def test_2_5_invalid_client_configuration_with_callback(self): with self.assertRaises(ConfigurationError): self.create_client(authmechanismproperties=props) + def test_2_5_invalid_use_of_ALLOWED_HOSTS(self): + # Create an OIDC configured client with auth mechanism properties `{"ENVIRONMENT": "azure", "ALLOWED_HOSTS": []}`. + props: Dict = {"ENVIRONMENT": "azure", "ALLOWED_HOSTS": []} + # Assert it returns a client configuration error. + with self.assertRaises(ConfigurationError): + self.create_client(authmechanismproperties=props) + def test_3_1_authentication_failure_with_cached_tokens_fetch_a_new_token_and_retry(self): # Create a MongoClient and an OIDC callback that implements the provider logic. client = self.create_client() diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-Date-Aggregate.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-Date-Aggregate.json index ba53b007b5..9eaabe0d71 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-Date-Aggregate.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-Date-Aggregate.json @@ -6,7 +6,8 @@ "replicaset", "sharded", "load-balanced" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-Date-Correctness.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-Date-Correctness.json index e9620efbed..fa887e0892 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-Date-Correctness.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-Date-Correctness.json @@ -6,7 +6,8 @@ "replicaset", "sharded", "load-balanced" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-Date-Delete.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-Date-Delete.json index daaa093896..cce4faf188 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-Date-Delete.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-Date-Delete.json @@ -6,7 +6,8 @@ "replicaset", "sharded", "load-balanced" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-Date-FindOneAndUpdate.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-Date-FindOneAndUpdate.json index 8500fa8291..4392b67686 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-Date-FindOneAndUpdate.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-Date-FindOneAndUpdate.json @@ -6,7 +6,8 @@ "replicaset", "sharded", "load-balanced" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-Date-InsertFind.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-Date-InsertFind.json index 7de45ba00e..27ce7881df 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-Date-InsertFind.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-Date-InsertFind.json @@ -6,7 +6,8 @@ "replicaset", "sharded", "load-balanced" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-Date-Update.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-Date-Update.json index d5b62be062..f7d5a6af66 100644 --- 
a/test/client-side-encryption/spec/legacy/fle2v2-Range-Date-Update.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-Date-Update.json @@ -6,7 +6,8 @@ "replicaset", "sharded", "load-balanced" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-Decimal-Aggregate.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-Decimal-Aggregate.json index 081bc577fb..401ee34e3f 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-Decimal-Aggregate.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-Decimal-Aggregate.json @@ -4,7 +4,8 @@ "minServerVersion": "7.0.0", "topology": [ "replicaset" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-Decimal-Correctness.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-Decimal-Correctness.json index 12fe7c8bc8..758d3e5732 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-Decimal-Correctness.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-Decimal-Correctness.json @@ -4,7 +4,8 @@ "minServerVersion": "7.0.0", "topology": [ "replicaset" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-Decimal-Delete.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-Decimal-Delete.json index ac49d16a20..24a08f318c 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-Decimal-Delete.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-Decimal-Delete.json @@ -4,7 +4,8 @@ "minServerVersion": "7.0.0", "topology": [ "replicaset" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-Decimal-FindOneAndUpdate.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-Decimal-FindOneAndUpdate.json index 88a2350786..2a8070ecf9 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-Decimal-FindOneAndUpdate.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-Decimal-FindOneAndUpdate.json @@ -4,7 +4,8 @@ "minServerVersion": "7.0.0", "topology": [ "replicaset" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-Decimal-InsertFind.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-Decimal-InsertFind.json index 54e43e4a26..2ef63f42b9 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-Decimal-InsertFind.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-Decimal-InsertFind.json @@ -4,7 +4,8 @@ "minServerVersion": "7.0.0", "topology": [ "replicaset" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-Decimal-Update.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-Decimal-Update.json index b2b8136a9a..8064eb1b18 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-Decimal-Update.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-Decimal-Update.json @@ -4,7 +4,8 @@ "minServerVersion": "7.0.0", "topology": [ "replicaset" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-DecimalPrecision-Aggregate.json 
b/test/client-side-encryption/spec/legacy/fle2v2-Range-DecimalPrecision-Aggregate.json index b078d18172..8cf143c094 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-DecimalPrecision-Aggregate.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-DecimalPrecision-Aggregate.json @@ -6,7 +6,8 @@ "replicaset", "sharded", "load-balanced" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-DecimalPrecision-Correctness.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-DecimalPrecision-Correctness.json index 0859e702aa..a4b06998f7 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-DecimalPrecision-Correctness.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-DecimalPrecision-Correctness.json @@ -6,7 +6,8 @@ "replicaset", "sharded", "load-balanced" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-DecimalPrecision-Delete.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-DecimalPrecision-Delete.json index 6e1ad90cd4..fad8234838 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-DecimalPrecision-Delete.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-DecimalPrecision-Delete.json @@ -6,7 +6,8 @@ "replicaset", "sharded", "load-balanced" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-DecimalPrecision-FindOneAndUpdate.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-DecimalPrecision-FindOneAndUpdate.json index 1cfd19a1e8..fb8f4f4140 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-DecimalPrecision-FindOneAndUpdate.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-DecimalPrecision-FindOneAndUpdate.json @@ -6,7 +6,8 @@ "replicaset", "sharded", "load-balanced" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-DecimalPrecision-InsertFind.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-DecimalPrecision-InsertFind.json index da76609727..79562802e6 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-DecimalPrecision-InsertFind.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-DecimalPrecision-InsertFind.json @@ -6,7 +6,8 @@ "replicaset", "sharded", "load-balanced" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-DecimalPrecision-Update.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-DecimalPrecision-Update.json index 2d201948cf..cc93b76948 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-DecimalPrecision-Update.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-DecimalPrecision-Update.json @@ -6,7 +6,8 @@ "replicaset", "sharded", "load-balanced" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-Double-Aggregate.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-Double-Aggregate.json index c188f1f5a9..79f26660f2 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-Double-Aggregate.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-Double-Aggregate.json @@ -6,7 +6,8 @@ "replicaset", "sharded", "load-balanced" - ] + ], + 
"maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-Double-Correctness.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-Double-Correctness.json index 3e298127dd..117e56af62 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-Double-Correctness.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-Double-Correctness.json @@ -6,7 +6,8 @@ "replicaset", "sharded", "load-balanced" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-Double-Delete.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-Double-Delete.json index dc0ba435f0..40d8ed5bb2 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-Double-Delete.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-Double-Delete.json @@ -6,7 +6,8 @@ "replicaset", "sharded", "load-balanced" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-Double-FindOneAndUpdate.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-Double-FindOneAndUpdate.json index 4b96575e18..f0893ce661 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-Double-FindOneAndUpdate.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-Double-FindOneAndUpdate.json @@ -6,7 +6,8 @@ "replicaset", "sharded", "load-balanced" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-Double-InsertFind.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-Double-InsertFind.json index 4827b6838f..d3dc2f830c 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-Double-InsertFind.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-Double-InsertFind.json @@ -6,7 +6,8 @@ "replicaset", "sharded", "load-balanced" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-Double-Update.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-Double-Update.json index c3284ad0ff..9d6a1fbfdd 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-Double-Update.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-Double-Update.json @@ -6,7 +6,8 @@ "replicaset", "sharded", "load-balanced" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-DoublePrecision-Aggregate.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-DoublePrecision-Aggregate.json index a2c1f3b75d..4188685a2c 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-DoublePrecision-Aggregate.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-DoublePrecision-Aggregate.json @@ -6,7 +6,8 @@ "replicaset", "sharded", "load-balanced" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-DoublePrecision-Correctness.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-DoublePrecision-Correctness.json index d0c0601ce8..60f1ea7a33 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-DoublePrecision-Correctness.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-DoublePrecision-Correctness.json @@ -6,7 +6,8 @@ "replicaset", "sharded", "load-balanced" - ] + ], + 
"maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-DoublePrecision-Delete.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-DoublePrecision-Delete.json index a617442ee0..4ed591d3f8 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-DoublePrecision-Delete.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-DoublePrecision-Delete.json @@ -6,7 +6,8 @@ "replicaset", "sharded", "load-balanced" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-DoublePrecision-FindOneAndUpdate.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-DoublePrecision-FindOneAndUpdate.json index 5565fb179e..d8fbbfae73 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-DoublePrecision-FindOneAndUpdate.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-DoublePrecision-FindOneAndUpdate.json @@ -6,7 +6,8 @@ "replicaset", "sharded", "load-balanced" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-DoublePrecision-InsertFind.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-DoublePrecision-InsertFind.json index a1d8c17855..4213b066d1 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-DoublePrecision-InsertFind.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-DoublePrecision-InsertFind.json @@ -6,7 +6,8 @@ "replicaset", "sharded", "load-balanced" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-DoublePrecision-Update.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-DoublePrecision-Update.json index 6ea99242b1..89eb4c338d 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-DoublePrecision-Update.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-DoublePrecision-Update.json @@ -6,7 +6,8 @@ "replicaset", "sharded", "load-balanced" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-Int-Aggregate.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-Int-Aggregate.json index b3b2826faf..686f0241ba 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-Int-Aggregate.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-Int-Aggregate.json @@ -6,7 +6,8 @@ "replicaset", "sharded", "load-balanced" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-Int-Correctness.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-Int-Correctness.json index 4932223ba2..2964624f22 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-Int-Correctness.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-Int-Correctness.json @@ -6,7 +6,8 @@ "replicaset", "sharded", "load-balanced" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-Int-Delete.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-Int-Delete.json index 03f816e4b2..531b3e7590 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-Int-Delete.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-Int-Delete.json @@ -6,7 +6,8 @@ "replicaset", "sharded", 
"load-balanced" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-Int-FindOneAndUpdate.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-Int-FindOneAndUpdate.json index d573f7b6a0..402086cdb6 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-Int-FindOneAndUpdate.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-Int-FindOneAndUpdate.json @@ -6,7 +6,8 @@ "replicaset", "sharded", "load-balanced" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-Int-InsertFind.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-Int-InsertFind.json index 04953663fe..965b8a5516 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-Int-InsertFind.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-Int-InsertFind.json @@ -6,7 +6,8 @@ "replicaset", "sharded", "load-balanced" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-Int-Update.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-Int-Update.json index 4c7a3c2789..6cf44ac782 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-Int-Update.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-Int-Update.json @@ -6,7 +6,8 @@ "replicaset", "sharded", "load-balanced" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-Long-Aggregate.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-Long-Aggregate.json index a7e77fd5c3..6edb38a800 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-Long-Aggregate.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-Long-Aggregate.json @@ -6,7 +6,8 @@ "replicaset", "sharded", "load-balanced" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-Long-Correctness.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-Long-Correctness.json index 365822c79c..3d33f7381b 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-Long-Correctness.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-Long-Correctness.json @@ -6,7 +6,8 @@ "replicaset", "sharded", "load-balanced" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-Long-Delete.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-Long-Delete.json index 17a01fe076..1b32782010 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-Long-Delete.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-Long-Delete.json @@ -6,7 +6,8 @@ "replicaset", "sharded", "load-balanced" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-Long-FindOneAndUpdate.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-Long-FindOneAndUpdate.json index 918d0dfeed..b8e3b888a8 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-Long-FindOneAndUpdate.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-Long-FindOneAndUpdate.json @@ -6,7 +6,8 @@ "replicaset", "sharded", "load-balanced" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git 
a/test/client-side-encryption/spec/legacy/fle2v2-Range-Long-InsertFind.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-Long-InsertFind.json index 9fafd10d4b..d637fcf9e7 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-Long-InsertFind.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-Long-InsertFind.json @@ -6,7 +6,8 @@ "replicaset", "sharded", "load-balanced" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-Long-Update.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-Long-Update.json index 20ac25bfad..1b76019a4c 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-Long-Update.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-Long-Update.json @@ -6,7 +6,8 @@ "replicaset", "sharded", "load-balanced" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/client-side-encryption/spec/legacy/fle2v2-Range-WrongType.json b/test/client-side-encryption/spec/legacy/fle2v2-Range-WrongType.json index 5a6e650ab4..704a693b8f 100644 --- a/test/client-side-encryption/spec/legacy/fle2v2-Range-WrongType.json +++ b/test/client-side-encryption/spec/legacy/fle2v2-Range-WrongType.json @@ -6,7 +6,8 @@ "replicaset", "sharded", "load-balanced" - ] + ], + "maxServerVersion": "7.99.99" } ], "database_name": "default", diff --git a/test/connection_monitoring/pool-clear-interrupting-pending-connections.json b/test/connection_monitoring/pool-clear-interrupting-pending-connections.json index c1fd746329..ceae07a1c7 100644 --- a/test/connection_monitoring/pool-clear-interrupting-pending-connections.json +++ b/test/connection_monitoring/pool-clear-interrupting-pending-connections.json @@ -17,7 +17,7 @@ ], "closeConnection": false, "blockConnection": true, - "blockTimeMS": 10000 + "blockTimeMS": 1000 } }, "poolOptions": { diff --git a/test/connection_string/test/valid-options.json b/test/connection_string/test/valid-options.json index 01bc2264bb..3c79fe7ae5 100644 --- a/test/connection_string/test/valid-options.json +++ b/test/connection_string/test/valid-options.json @@ -37,6 +37,25 @@ "options": { "tls": true } + }, + { + "description": "Colon in a key value pair", + "uri": "mongodb://example.com/?authMechanism=MONGODB-OIDC&authMechanismProperties=TOKEN_RESOURCE:mongodb://test-cluster", + "valid": true, + "warning": false, + "hosts": [ + { + "type": "hostname", + "host": "example.com", + "port": null + } + ], + "auth": null, + "options": { + "authmechanismProperties": { + "TOKEN_RESOURCE": "mongodb://test-cluster" + } + } } ] } diff --git a/test/connection_string/test/valid-warnings.json b/test/connection_string/test/valid-warnings.json index 1eacbf8fcb..f0e8288bc7 100644 --- a/test/connection_string/test/valid-warnings.json +++ b/test/connection_string/test/valid-warnings.json @@ -93,6 +93,21 @@ ], "auth": null, "options": null + }, + { + "description": "Comma in a key value pair causes a warning", + "uri": "mongodb://localhost?authMechanism=MONGODB-OIDC&authMechanismProperties=TOKEN_RESOURCE:mongodb://host1%2Chost2", + "valid": true, + "warning": true, + "hosts": [ + { + "type": "hostname", + "host": "localhost", + "port": null + } + ], + "auth": null, + "options": null } ] } diff --git a/test/server_selection_logging/operation-id.json b/test/server_selection_logging/operation-id.json index 276e4b8d6d..23af7a8a22 100644 --- a/test/server_selection_logging/operation-id.json +++ 
b/test/server_selection_logging/operation-id.json @@ -194,7 +194,7 @@ } }, { - "level": "info", + "level": "debug", "component": "serverSelection", "data": { "message": "Waiting for suitable server to become available", diff --git a/test/server_selection_logging/replica-set.json b/test/server_selection_logging/replica-set.json index 5eba784bf2..830b1ea51a 100644 --- a/test/server_selection_logging/replica-set.json +++ b/test/server_selection_logging/replica-set.json @@ -184,7 +184,7 @@ } }, { - "level": "info", + "level": "debug", "component": "serverSelection", "data": { "message": "Waiting for suitable server to become available", diff --git a/test/server_selection_logging/sharded.json b/test/server_selection_logging/sharded.json index d42fba9100..346c050f9e 100644 --- a/test/server_selection_logging/sharded.json +++ b/test/server_selection_logging/sharded.json @@ -193,7 +193,7 @@ } }, { - "level": "info", + "level": "debug", "component": "serverSelection", "data": { "message": "Waiting for suitable server to become available", diff --git a/test/server_selection_logging/standalone.json b/test/server_selection_logging/standalone.json index 3d8b10398c..3152d0bbf3 100644 --- a/test/server_selection_logging/standalone.json +++ b/test/server_selection_logging/standalone.json @@ -211,7 +211,7 @@ } }, { - "level": "info", + "level": "debug", "component": "serverSelection", "data": { "message": "Waiting for suitable server to become available", diff --git a/test/test_auth.py b/test/test_auth.py index 596c94d562..74089bd68e 100644 --- a/test/test_auth.py +++ b/test/test_auth.py @@ -343,6 +343,7 @@ def tearDown(self): client_context.drop_user("pymongo_test", "user") super().tearDown() + @client_context.require_no_fips def test_scram_sha1(self): host, port = client_context.host, client_context.port @@ -404,6 +405,7 @@ def test_scram_skip_empty_exchange(self): else: self.assertEqual(started, ["saslStart", "saslContinue", "saslContinue"]) + @client_context.require_no_fips def test_scram(self): # Step 1: create users client_context.create_user( diff --git a/test/test_bson.py b/test/test_bson.py index 89c0983ca5..fec84090d2 100644 --- a/test/test_bson.py +++ b/test/test_bson.py @@ -23,6 +23,7 @@ import os import pickle import re +import struct import sys import tempfile import uuid @@ -489,6 +490,33 @@ def test_basic_encode(self): b"\x00", ) + def test_bad_code(self): + # Assert that decoding invalid Code with scope does not include a field name. 
+ def generate_payload(length: int) -> bytes: + string_size = length - 0x1E + + return bytes.fromhex( + struct.pack("", + "Malformed auth mechanism properties", ): parse_uri(uri) diff --git a/test/test_uri_spec.py b/test/test_uri_spec.py index 33a22330fa..f483a03842 100644 --- a/test/test_uri_spec.py +++ b/test/test_uri_spec.py @@ -27,7 +27,7 @@ from test import clear_warning_registry, unittest from pymongo.common import INTERNAL_URI_OPTION_NAME_MAP, validate -from pymongo.compression_support import _HAVE_SNAPPY +from pymongo.compression_support import _have_snappy from pymongo.uri_parser import SRV_SCHEME, parse_uri CONN_STRING_TEST_PATH = os.path.join( @@ -95,7 +95,7 @@ def modified_test_scenario(*args, **kwargs): def create_test(test, test_workdir): def run_scenario(self): compressors = (test.get("options") or {}).get("compressors", []) - if "snappy" in compressors and not _HAVE_SNAPPY: + if "snappy" in compressors and not _have_snappy(): self.skipTest("This test needs the snappy module.") valid = True warning = False diff --git a/test/unified_format.py b/test/unified_format.py index 3faa34d0e6..3f98b571bb 100644 --- a/test/unified_format.py +++ b/test/unified_format.py @@ -1030,6 +1030,12 @@ def setUpClass(cls): if "retryable-writes" in cls.TEST_SPEC["description"]: raise unittest.SkipTest("MMAPv1 does not support retryWrites=True") + @classmethod + def tearDownClass(cls): + for client in cls.mongos_clients: + client.close() + super().tearDownClass() + def setUp(self): super().setUp() # process schemaVersion @@ -1055,6 +1061,15 @@ def maybe_skip_test(self, spec): or "Cancel server check" in spec["description"] ): self.skipTest("MMAPv1 does not support retryWrites=True") + if ( + "Database-level aggregate with $out includes read preference for 5.0+ server" + in spec["description"] + ): + if client_context.version[0] == 8: + self.skipTest("waiting on PYTHON-4356") + if "Aggregate with $out includes read preference for 5.0+ server" in spec["description"]: + if client_context.version[0] == 8: + self.skipTest("waiting on PYTHON-4356") if "Client side error in command starting transaction" in spec["description"]: self.skipTest("Implement PYTHON-1894") if "timeoutMS applied to entire download" in spec["description"]: @@ -1112,6 +1127,8 @@ def maybe_skip_test(self, spec): self.skipTest("PyMongo does not support timeoutMode") def process_error(self, exception, spec): + if isinstance(exception, unittest.SkipTest): + raise is_error = spec.get("isError") is_client_error = spec.get("isClientError") is_timeout_error = spec.get("isTimeoutError") diff --git a/test/utils.py b/test/utils.py index 2caaa6fd99..15480dc440 100644 --- a/test/utils.py +++ b/test/utils.py @@ -39,9 +39,9 @@ from pymongo.cursor import CursorType from pymongo.errors import ConfigurationError, OperationFailure from pymongo.hello import HelloCompat +from pymongo.helpers import _SENSITIVE_COMMANDS from pymongo.lock import _create_lock from pymongo.monitoring import ( - _SENSITIVE_COMMANDS, ConnectionCheckedInEvent, ConnectionCheckedOutEvent, ConnectionCheckOutFailedEvent, diff --git a/tools/fail_if_no_c.py b/tools/fail_if_no_c.py index 95810c1a73..6848e155aa 100644 --- a/tools/fail_if_no_c.py +++ b/tools/fail_if_no_c.py @@ -29,6 +29,14 @@ import pymongo # noqa: E402 if not pymongo.has_c() or not bson.has_c(): + try: + from pymongo import _cmessage # type:ignore[attr-defined] # noqa: F401 + except Exception as e: + print(e) + try: + from bson import _cbson # type:ignore[attr-defined] # noqa: F401 + except Exception as e: + print(e) 
sys.exit("could not load C extensions") if os.environ.get("ENSURE_UNIVERSAL2") == "1": diff --git a/tox.ini b/tox.ini index eb9ae204e2..331c73ce18 100644 --- a/tox.ini +++ b/tox.ini @@ -31,8 +31,6 @@ envlist = doc-test, # Linkcheck sphinx docs linkcheck - # Check the sdist integrity. - manifest labels = # Use labels and -m instead of -e so that tox -m