diff --git a/.evergreen/activate-kms-venv.sh b/.evergreen/activate-kms-venv.sh deleted file mode 100755 index 4e8effc76c0..00000000000 --- a/.evergreen/activate-kms-venv.sh +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/bash -set -o errexit # Exit the script with error if any of the commands fail - -cd ${DRIVERS_TOOLS}/.evergreen/csfle -. ./activate-kmstlsvenv.sh - -if [ "Windows_NT" = "$OS" ]; then - echo "export PYTHON_EXEC='kmstlsvenv/Scripts/python.exe'" > prepare-kmsvenv.sh -else - echo "export PYTHON_EXEC='./kmstlsvenv/bin/python3'" > prepare-kmsvenv.sh -fi diff --git a/.evergreen/config.in.yml b/.evergreen/config.in.yml index ab21ef1f009..caa7f202e83 100644 --- a/.evergreen/config.in.yml +++ b/.evergreen/config.in.yml @@ -92,43 +92,6 @@ functions: sleep 1 docker ps - "bootstrap kms servers": - - command: subprocess.exec - params: - binary: bash - working_dir: src - args: - - .evergreen/activate-kms-venv.sh - env: - DRIVERS_TOOLS: ${DRIVERS_TOOLS} - - command: subprocess.exec - params: - background: true - binary: bash - working_dir: src - args: - - .evergreen/run-kmip-server.sh - env: - DRIVERS_TOOLS: ${DRIVERS_TOOLS} - - command: subprocess.exec - params: - background: true - binary: bash - working_dir: src - args: - - .evergreen/run-kms-servers.sh - env: - DRIVERS_TOOLS: ${DRIVERS_TOOLS} - - command: subprocess.exec - params: - background: true - working_dir: src - binary: bash - args: - - .evergreen/run-azure-kms-mock-server.sh - env: - DRIVERS_TOOLS: ${DRIVERS_TOOLS} - "run tests": - command: ec2.assume_role params: @@ -761,7 +724,6 @@ tasks: tags: ["serverless"] commands: - func: install dependencies - - func: bootstrap kms servers - func: "run serverless tests" - name: "test-gcpkms-task" diff --git a/.evergreen/config.yml b/.evergreen/config.yml index 384babe53f6..27c6ab3d078 100644 --- a/.evergreen/config.yml +++ b/.evergreen/config.yml @@ -65,42 +65,6 @@ functions: DRIVERS_TOOLS="${DRIVERS_TOOLS}" bash ${DRIVERS_TOOLS}/.evergreen/atlas_data_lake/run-mongohouse-image.sh sleep 1 docker ps - bootstrap kms servers: - - command: subprocess.exec - params: - binary: bash - working_dir: src - args: - - .evergreen/activate-kms-venv.sh - env: - DRIVERS_TOOLS: ${DRIVERS_TOOLS} - - command: subprocess.exec - params: - background: true - binary: bash - working_dir: src - args: - - .evergreen/run-kmip-server.sh - env: - DRIVERS_TOOLS: ${DRIVERS_TOOLS} - - command: subprocess.exec - params: - background: true - binary: bash - working_dir: src - args: - - .evergreen/run-kms-servers.sh - env: - DRIVERS_TOOLS: ${DRIVERS_TOOLS} - - command: subprocess.exec - params: - background: true - working_dir: src - binary: bash - args: - - .evergreen/run-azure-kms-mock-server.sh - env: - DRIVERS_TOOLS: ${DRIVERS_TOOLS} run tests: - command: ec2.assume_role params: @@ -685,7 +649,6 @@ tasks: - serverless commands: - func: install dependencies - - func: bootstrap kms servers - func: run serverless tests - name: test-gcpkms-task commands: @@ -941,7 +904,6 @@ tasks: - {key: AUTH, value: auth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-latest-replica_set tags: @@ -957,7 +919,6 @@ tasks: - {key: AUTH, value: auth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-latest-sharded_cluster tags: @@ -973,7 +934,6 @@ tasks: - {key: AUTH, value: auth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run 
tests - name: test-rapid-server tags: @@ -989,7 +949,6 @@ tasks: - {key: AUTH, value: auth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-rapid-replica_set tags: @@ -1005,7 +964,6 @@ tasks: - {key: AUTH, value: auth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-rapid-sharded_cluster tags: @@ -1021,7 +979,6 @@ tasks: - {key: AUTH, value: auth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-8.0-server tags: @@ -1037,7 +994,6 @@ tasks: - {key: AUTH, value: auth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-8.0-replica_set tags: @@ -1053,7 +1009,6 @@ tasks: - {key: AUTH, value: auth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-8.0-sharded_cluster tags: @@ -1069,7 +1024,6 @@ tasks: - {key: AUTH, value: auth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-7.0-server tags: @@ -1085,7 +1039,6 @@ tasks: - {key: AUTH, value: auth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-7.0-replica_set tags: @@ -1101,7 +1054,6 @@ tasks: - {key: AUTH, value: auth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-7.0-sharded_cluster tags: @@ -1117,7 +1069,6 @@ tasks: - {key: AUTH, value: auth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-6.0-server tags: @@ -1133,7 +1084,6 @@ tasks: - {key: AUTH, value: auth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-6.0-replica_set tags: @@ -1149,7 +1099,6 @@ tasks: - {key: AUTH, value: auth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-6.0-sharded_cluster tags: @@ -1165,7 +1114,6 @@ tasks: - {key: AUTH, value: auth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-5.0-server tags: @@ -1181,7 +1129,6 @@ tasks: - {key: AUTH, value: auth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-5.0-replica_set tags: @@ -1197,7 +1144,6 @@ tasks: - {key: AUTH, value: auth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-5.0-sharded_cluster tags: @@ -1213,7 +1159,6 @@ tasks: - {key: AUTH, value: auth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-4.4-server tags: @@ -1229,7 +1174,6 @@ tasks: - {key: AUTH, value: auth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-4.4-replica_set tags: @@ -1245,7 +1189,6 @@ tasks: - {key: AUTH, value: auth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: 
test-4.4-sharded_cluster tags: @@ -1261,7 +1204,6 @@ tasks: - {key: AUTH, value: auth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-4.2-server tags: @@ -1277,7 +1219,6 @@ tasks: - {key: AUTH, value: auth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-4.2-replica_set tags: @@ -1293,7 +1234,6 @@ tasks: - {key: AUTH, value: auth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-4.2-sharded_cluster tags: @@ -1309,7 +1249,6 @@ tasks: - {key: AUTH, value: auth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-4.0-server tags: @@ -1325,7 +1264,6 @@ tasks: - {key: AUTH, value: auth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-4.0-replica_set tags: @@ -1341,7 +1279,6 @@ tasks: - {key: AUTH, value: auth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-4.0-sharded_cluster tags: @@ -1357,7 +1294,6 @@ tasks: - {key: AUTH, value: auth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-latest-server-v1-api tags: @@ -1378,7 +1314,6 @@ tasks: - {key: CLIENT_ENCRYPTION, value: 'true'} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-x509-authentication tags: @@ -1559,7 +1494,6 @@ tasks: - {key: TOPOLOGY, value: replica_set} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run socks5 tests - name: test-socks5-csfle tags: @@ -1574,7 +1508,6 @@ tasks: - {key: TEST_SOCKS5_CSFLE, value: 'true'} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run socks5 tests - name: test-socks5-tls tags: [] @@ -2257,7 +2190,6 @@ tasks: - {key: CLIENT_ENCRYPTION, value: 'true'} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: install mongodb-client-encryption - func: assume secrets manager rule - func: run custom csfle tests @@ -2276,7 +2208,6 @@ tasks: - {key: CLIENT_ENCRYPTION, value: 'true'} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: install mongodb-client-encryption - func: assume secrets manager rule - func: run custom csfle tests @@ -2295,7 +2226,6 @@ tasks: - {key: CLIENT_ENCRYPTION, value: 'true'} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: install mongodb-client-encryption - func: assume secrets manager rule - func: run custom csfle tests @@ -2314,7 +2244,6 @@ tasks: - {key: CLIENT_ENCRYPTION, value: 'true'} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: install package vars: PACKAGE: mongodb-client-encryption@6.0.0 @@ -2335,7 +2264,6 @@ tasks: - {key: MONGODB_BINARIES, value: '${PROJECT_DIRECTORY}/mongodb/bin'} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: assume secrets manager rule - func: build and test alpine FLE - name: test-latest-server-noauth @@ 
-2353,7 +2281,6 @@ tasks: - {key: AUTH, value: noauth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-latest-replica_set-noauth tags: @@ -2370,7 +2297,6 @@ tasks: - {key: AUTH, value: noauth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-latest-sharded_cluster-noauth tags: @@ -2387,7 +2313,6 @@ tasks: - {key: AUTH, value: noauth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-rapid-server-noauth tags: @@ -2404,7 +2329,6 @@ tasks: - {key: AUTH, value: noauth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-rapid-replica_set-noauth tags: @@ -2421,7 +2345,6 @@ tasks: - {key: AUTH, value: noauth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-rapid-sharded_cluster-noauth tags: @@ -2438,7 +2361,6 @@ tasks: - {key: AUTH, value: noauth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-8.0-server-noauth tags: @@ -2455,7 +2377,6 @@ tasks: - {key: AUTH, value: noauth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-8.0-replica_set-noauth tags: @@ -2472,7 +2393,6 @@ tasks: - {key: AUTH, value: noauth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-8.0-sharded_cluster-noauth tags: @@ -2489,7 +2409,6 @@ tasks: - {key: AUTH, value: noauth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-7.0-server-noauth tags: @@ -2506,7 +2425,6 @@ tasks: - {key: AUTH, value: noauth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-7.0-replica_set-noauth tags: @@ -2523,7 +2441,6 @@ tasks: - {key: AUTH, value: noauth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-7.0-sharded_cluster-noauth tags: @@ -2540,7 +2457,6 @@ tasks: - {key: AUTH, value: noauth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-6.0-server-noauth tags: @@ -2557,7 +2473,6 @@ tasks: - {key: AUTH, value: noauth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-6.0-replica_set-noauth tags: @@ -2574,7 +2489,6 @@ tasks: - {key: AUTH, value: noauth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-6.0-sharded_cluster-noauth tags: @@ -2591,7 +2505,6 @@ tasks: - {key: AUTH, value: noauth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-5.0-server-noauth tags: @@ -2608,7 +2521,6 @@ tasks: - {key: AUTH, value: noauth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-5.0-replica_set-noauth tags: @@ -2625,7 +2537,6 @@ tasks: - {key: AUTH, value: noauth} - func: install 
dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-5.0-sharded_cluster-noauth tags: @@ -2642,7 +2553,6 @@ tasks: - {key: AUTH, value: noauth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-4.4-server-noauth tags: @@ -2659,7 +2569,6 @@ tasks: - {key: AUTH, value: noauth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-4.4-replica_set-noauth tags: @@ -2676,7 +2585,6 @@ tasks: - {key: AUTH, value: noauth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-4.4-sharded_cluster-noauth tags: @@ -2693,7 +2601,6 @@ tasks: - {key: AUTH, value: noauth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-4.2-server-noauth tags: @@ -2710,7 +2617,6 @@ tasks: - {key: AUTH, value: noauth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-4.2-replica_set-noauth tags: @@ -2727,7 +2633,6 @@ tasks: - {key: AUTH, value: noauth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-4.2-sharded_cluster-noauth tags: @@ -2744,7 +2649,6 @@ tasks: - {key: AUTH, value: noauth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-4.0-server-noauth tags: @@ -2761,7 +2665,6 @@ tasks: - {key: AUTH, value: noauth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-4.0-replica_set-noauth tags: @@ -2778,7 +2681,6 @@ tasks: - {key: AUTH, value: noauth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-4.0-sharded_cluster-noauth tags: @@ -2795,7 +2697,6 @@ tasks: - {key: AUTH, value: noauth} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-lambda-example tags: @@ -2845,7 +2746,6 @@ tasks: - {key: TEST_NPM_SCRIPT, value: check:csfle} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-rapid-csfle-mongocryptd tags: @@ -2862,7 +2762,6 @@ tasks: - {key: TEST_NPM_SCRIPT, value: check:csfle} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-8.0-csfle-mongocryptd tags: @@ -2879,7 +2778,6 @@ tasks: - {key: TEST_NPM_SCRIPT, value: check:csfle} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-7.0-csfle-mongocryptd tags: @@ -2896,7 +2794,6 @@ tasks: - {key: TEST_NPM_SCRIPT, value: check:csfle} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-6.0-csfle-mongocryptd tags: @@ -2913,7 +2810,6 @@ tasks: - {key: TEST_NPM_SCRIPT, value: check:csfle} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-5.0-csfle-mongocryptd tags: @@ -2930,7 +2826,6 @@ tasks: - {key: TEST_NPM_SCRIPT, value: check:csfle} - func: 
install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-4.4-csfle-mongocryptd tags: @@ -2947,7 +2842,6 @@ tasks: - {key: TEST_NPM_SCRIPT, value: check:csfle} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests - name: test-4.2-csfle-mongocryptd tags: @@ -2964,7 +2858,6 @@ tasks: - {key: TEST_NPM_SCRIPT, value: check:csfle} - func: install dependencies - func: bootstrap mongo-orchestration - - func: bootstrap kms servers - func: run tests task_groups: - name: serverless_task_group @@ -3827,14 +3720,6 @@ buildvariants: - run-custom-csfle-tests-rapid - run-custom-csfle-tests-latest - test-latest-driver-mongodb-client-encryption-6.0.0 - - name: rhel8-test-serverless - display_name: Serverless Test - run_on: rhel80-large - expansions: - NODE_LTS_VERSION: 16 - NPM_VERSION: 9 - tasks: - - serverless_task_group - name: rhel8-test-gcp-kms display_name: GCP KMS Test run_on: debian11-small diff --git a/.evergreen/generate_evergreen_tasks.js b/.evergreen/generate_evergreen_tasks.js index da4ed1ad61d..b4ff97688ee 100644 --- a/.evergreen/generate_evergreen_tasks.js +++ b/.evergreen/generate_evergreen_tasks.js @@ -73,7 +73,6 @@ function makeTask({ mongoVersion, topology, tags = [], auth = 'auth' }) { updateExpansions({ VERSION: mongoVersion, TOPOLOGY: topology, AUTH: auth }), { func: 'install dependencies' }, { func: 'bootstrap mongo-orchestration' }, - { func: 'bootstrap kms servers' }, { func: 'run tests' } ] }; @@ -111,7 +110,6 @@ BASE_TASKS.push({ }), { func: 'install dependencies' }, { func: 'bootstrap mongo-orchestration' }, - { func: 'bootstrap kms servers' }, { func: 'run tests' } ] }); @@ -168,7 +166,7 @@ TASKS.push( }), { func: 'install dependencies' }, { func: 'run kerberos tests' } - ] + ] }, { name: 'test-auth-ldap', @@ -185,7 +183,6 @@ TASKS.push( }), { func: 'install dependencies' }, { func: 'bootstrap mongo-orchestration' }, - { func: 'bootstrap kms servers' }, { func: 'run socks5 tests' } ] }, @@ -200,7 +197,6 @@ TASKS.push( }), { func: 'install dependencies' }, { func: 'bootstrap mongo-orchestration' }, - { func: 'bootstrap kms servers' }, { func: 'run socks5 tests' } ] }, @@ -473,7 +469,6 @@ const MONGOCRYPTD_CSFLE_TASKS = MONGODB_VERSIONS.filter( }), { func: 'install dependencies' }, { func: 'bootstrap mongo-orchestration' }, - { func: 'bootstrap kms servers' }, { func: 'run tests' } ] }; @@ -682,7 +677,6 @@ for (const version of ['5.0', 'rapid', 'latest']) { }), { func: 'install dependencies' }, { func: 'bootstrap mongo-orchestration' }, - { func: 'bootstrap kms servers' }, { func: 'install mongodb-client-encryption' }, { func: 'assume secrets manager rule' }, { func: 'run custom csfle tests' } @@ -703,7 +697,6 @@ customDependencyTests.push({ }), { func: 'install dependencies' }, { func: 'bootstrap mongo-orchestration' }, - { func: 'bootstrap kms servers' }, { func: 'install package', vars: { @@ -750,7 +743,6 @@ SINGLETON_TASKS.push( }), { func: 'install dependencies' }, { func: 'bootstrap mongo-orchestration' }, - { func: 'bootstrap kms servers' }, { func: 'assume secrets manager rule' }, { func: 'build and test alpine FLE' } ] @@ -814,17 +806,17 @@ BUILD_VARIANTS.push({ tasks: customDependencyTests.map(({ name }) => name) }); -// special case for serverless testing -BUILD_VARIANTS.push({ - name: 'rhel8-test-serverless', - display_name: 'Serverless Test', - run_on: DEFAULT_OS, - expansions: { - NODE_LTS_VERSION: LOWEST_LTS, - NPM_VERSION: 9 - }, - tasks: 
['serverless_task_group'] -}); +// TODO(NODE-6786): Reenable serverless testing. +// BUILD_VARIANTS.push({ +// name: 'rhel8-test-serverless', +// display_name: 'Serverless Test', +// run_on: DEFAULT_OS, +// expansions: { +// NODE_LTS_VERSION: LOWEST_LTS, +// NPM_VERSION: 9 +// }, +// tasks: ['serverless_task_group'] +// }); BUILD_VARIANTS.push({ name: 'rhel8-test-gcp-kms', diff --git a/.evergreen/prepare-crypt-shared-lib.sh b/.evergreen/prepare-crypt-shared-lib.sh deleted file mode 100755 index b716f9e3401..00000000000 --- a/.evergreen/prepare-crypt-shared-lib.sh +++ /dev/null @@ -1,34 +0,0 @@ -MONGODB_VERSION=${VERSION} -if [ -z "$MONGODB_VERSION" ]; then - # default to latest to match behavior of run-orchestration.sh. - MONGODB_VERSION=latest -fi - -. $DRIVERS_TOOLS/.evergreen/download-mongodb.sh -get_distro -# get_distro defines $DISTRO. -echo "distro='$DISTRO' version='$MONGODB_VERSION'". -get_mongodb_download_url_for "$DISTRO" "$MONGODB_VERSION" -# get_mongodb_download_url_for defines $MONGO_CRYPT_SHARED_DOWNLOAD_URL and $EXTRACT. -if [ -z "$MONGO_CRYPT_SHARED_DOWNLOAD_URL" ]; then - echo "There is no crypt_shared library for distro='$DISTRO' and version='$MONGODB_VERSION'". -else - echo "Downloading crypt_shared package from $MONGO_CRYPT_SHARED_DOWNLOAD_URL" - download_and_extract_crypt_shared "$MONGO_CRYPT_SHARED_DOWNLOAD_URL" "$EXTRACT" - CRYPT_SHARED_LIB_PATH="$(find $(pwd) -maxdepth 1 -type f \ - -name 'mongo_crypt_v1.so' -o \ - -name 'mongo_crypt_v1.dll' -o \ - -name 'mongo_crypt_v1.dylib')" - # Expect that we always find a crypt_shared library file and set the CRYPT_SHARED_LIB_PATH - # environment variable. If we didn't, print an error message and exit. - if [ -z "$CRYPT_SHARED_LIB_PATH" ]; then - echo 'CRYPT_SHARED_LIB_PATH is empty. Exiting.' - exit 1 - fi - # If we're on Windows, convert the "cygdrive" path to Windows-style paths. - if [ "Windows_NT" = "$OS" ]; then - CRYPT_SHARED_LIB_PATH=$(cygpath -m $CRYPT_SHARED_LIB_PATH) - fi - echo "CRYPT_SHARED_LIB_PATH: $CRYPT_SHARED_LIB_PATH" - echo "export CRYPT_SHARED_LIB_PATH=$CRYPT_SHARED_LIB_PATH" >crypt_shared.sh -fi diff --git a/.evergreen/run-azure-kms-mock-server.sh b/.evergreen/run-azure-kms-mock-server.sh deleted file mode 100644 index 944cdcd27aa..00000000000 --- a/.evergreen/run-azure-kms-mock-server.sh +++ /dev/null @@ -1,12 +0,0 @@ -#! /user/bin/env bash - -if [ -z ${DRIVERS_TOOLS+omitted} ]; then echo "DRIVERS_TOOLS is unset" && exit 1; fi - -set -o errexit - -pushd $DRIVERS_TOOLS/.evergreen/csfle -. ./activate-kmstlsvenv.sh -python bottle.py fake_azure:imds & -popd - -echo "Running Azure KMS idms server on port 8080" diff --git a/.evergreen/run-kmip-server.sh b/.evergreen/run-kmip-server.sh deleted file mode 100755 index c0a3f0f691d..00000000000 --- a/.evergreen/run-kmip-server.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash -set -o errexit # Exit the script with error if any of the commands fail - -cd ${DRIVERS_TOOLS}/.evergreen/csfle -. ./prepare-kmsvenv.sh - -echo "$PYTHON_EXEC" - -$PYTHON_EXEC -u kms_kmip_server.py \ - --ca_file ../x509gen/ca.pem \ - --cert_file ../x509gen/server.pem \ - --port 5698 diff --git a/.evergreen/run-kms-servers.sh b/.evergreen/run-kms-servers.sh deleted file mode 100755 index 8181a4d2f51..00000000000 --- a/.evergreen/run-kms-servers.sh +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/bash -set -o errexit # Exit the script with error if any of the commands fail - -cd ${DRIVERS_TOOLS}/.evergreen/csfle -. 
./prepare-kmsvenv.sh - -echo "$PYTHON_EXEC" - -$PYTHON_EXEC -u kms_http_server.py --ca_file ../x509gen/ca.pem --cert_file ../x509gen/expired.pem --port 8000 & -$PYTHON_EXEC -u kms_http_server.py --ca_file ../x509gen/ca.pem --cert_file ../x509gen/wrong-host.pem --port 8001 & -$PYTHON_EXEC -u kms_http_server.py --ca_file ../x509gen/ca.pem --cert_file ../x509gen/server.pem --port 8002 --require_client_cert & diff --git a/.evergreen/setup-fle.sh b/.evergreen/setup-fle.sh index 82877177ee2..a14bfbf4c53 100644 --- a/.evergreen/setup-fle.sh +++ b/.evergreen/setup-fle.sh @@ -3,11 +3,13 @@ bash ${DRIVERS_TOOLS}/.evergreen/csfle/setup-secrets.sh source secrets-export.sh +# start KMS servers +bash ${DRIVERS_TOOLS}/.evergreen/csfle/start-servers.sh + if [ -z "${RUN_WITH_MONGOCRYPTD}" ]; then - # Set up crypt shared lib if we don't want to use mongocryptd - bash .evergreen/prepare-crypt-shared-lib.sh - source crypt_shared.sh - echo "CRYPT_SHARED_LIB_PATH: $CRYPT_SHARED_LIB_PATH" + echo "crypt shared: $CRYPT_SHARED_LIB_PATH" else - echo "CRYPT_SHARED_LIB_PATH not set; using mongocryptd" + rm $CRYPT_SHARED_LIB_PATH + unset CRYPT_SHARED_LIB_PATH + echo "CRYPT_SHARED_LIB_PATH not set; using mongocryptd" fi diff --git a/.gitignore b/.gitignore index f04e671f84f..d0fdc406efd 100644 --- a/.gitignore +++ b/.gitignore @@ -99,6 +99,7 @@ test/lambda/env.json # files generated by tooling in drivers-evergreen-tools secrets-export.sh +secrets-export.fish mo-expansion.sh mo-expansion.yml expansions.sh diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 50ec80bd349..4d877d57f47 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "6.13.1" + ".": "6.14.0" } diff --git a/HISTORY.md b/HISTORY.md index 28f5b62d1c5..0d3a0ddae90 100644 --- a/HISTORY.md +++ b/HISTORY.md @@ -2,6 +2,21 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
+## [6.14.0](https://github.com/mongodb/node-mongodb-native/compare/v6.13.1...v6.14.0) (2025-02-28) + + +### Features + +* **NODE-6676:** add support for `nsType` in change stream create events ([#4431](https://github.com/mongodb/node-mongodb-native/issues/4431)) ([7800067](https://github.com/mongodb/node-mongodb-native/commit/7800067ad0ea4e24e3180fe264da4951b883e08d)) +* **NODE-6773:** add support for $lookup with automatic encryption ([#4427](https://github.com/mongodb/node-mongodb-native/issues/4427)) ([965b21a](https://github.com/mongodb/node-mongodb-native/commit/965b21a20eff38c7fe7d9036f912b31c10e5ba21)) + + +### Bug Fixes + +* **NODE-6765:** FindOneAndUpdateOptions supports aggregation expressions ([#4423](https://github.com/mongodb/node-mongodb-native/issues/4423)) ([421ddeb](https://github.com/mongodb/node-mongodb-native/commit/421ddeb329169f47b567cbe432c39737551bd234)) +* **NODE-6792:** use isUint8Array from driver's utils instead of util/types ([#4436](https://github.com/mongodb/node-mongodb-native/issues/4436)) ([dfe1fba](https://github.com/mongodb/node-mongodb-native/commit/dfe1fba777d89a53106487e38aaf5072978096b5)) +* **NODE-6794:** revert `@aws-sdk/credential-providers` peer compatibility change ([#4437](https://github.com/mongodb/node-mongodb-native/issues/4437)) ([488c407](https://github.com/mongodb/node-mongodb-native/commit/488c4071632c29960de8955bfbefbaab163b45ba)) + ## [6.13.1](https://github.com/mongodb/node-mongodb-native/compare/v6.13.0...v6.13.1) (2025-02-20) diff --git a/etc/bash_to_fish.mjs b/etc/bash_to_fish.mjs new file mode 100644 index 00000000000..09cfe054110 --- /dev/null +++ b/etc/bash_to_fish.mjs @@ -0,0 +1,39 @@ +import { createReadStream, promises as fs } from 'node:fs'; +import path from 'node:path'; +import readline from 'node:readline/promises'; + +/** + * Takes an "exports" only bash script file + * and converts it to fish syntax. 
+ * Will crash on any line that isn't: + * - a comment + * - an empty line + * - a bash 'set' call + * - export VAR=VAL + */ + +const fileName = process.argv[2]; +const outFileName = path.basename(fileName, '.sh') + '.fish'; +const input = createReadStream(process.argv[2]); +const lines = readline.createInterface({ input }); +const output = await fs.open(outFileName, 'w'); + +for await (let line of lines) { + line = line.trim(); + + if (!line.startsWith('export ')) { + if (line.startsWith('#')) continue; + if (line === '') continue; + if (line.startsWith('set')) continue; + throw new Error('Cannot translate: ' + line); + } + + const varVal = line.slice('export '.length); + const variable = varVal.slice(0, varVal.indexOf('=')); + const value = varVal.slice(varVal.indexOf('=') + 1); + await output.appendFile(`set -x ${variable} ${value}\n`); +} + +output.close(); +input.close(); +lines.close(); diff --git a/package-lock.json b/package-lock.json index ea9ce112329..8a56d5df388 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "mongodb", - "version": "6.13.1", + "version": "6.14.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "mongodb", - "version": "6.13.1", + "version": "6.14.0", "license": "Apache-2.0", "dependencies": { "@mongodb-js/saslprep": "^1.1.9", @@ -48,7 +48,7 @@ "js-yaml": "^4.1.0", "mocha": "^10.8.2", "mocha-sinon": "^2.1.2", - "mongodb-client-encryption": "^6.2.0", + "mongodb-client-encryption": "^6.3.0", "mongodb-legacy": "^6.1.3", "nyc": "^15.1.0", "prettier": "^3.4.2", @@ -69,7 +69,7 @@ "node": ">=16.20.1" }, "peerDependencies": { - "@aws-sdk/credential-providers": "^3.632.0", + "@aws-sdk/credential-providers": "^3.188.0", "@mongodb-js/zstd": "^1.1.0 || ^2.0.0", "gcp-metadata": "^5.2.0", "kerberos": "^2.0.1", @@ -6693,9 +6693,9 @@ } }, "node_modules/mongodb-client-encryption": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/mongodb-client-encryption/-/mongodb-client-encryption-6.2.0.tgz", - "integrity": "sha512-jfOCthPH0jxd9RJCerNbf1aRAcUJFwiWikJ2j9oBPRc+Oets3aKUriyZe4n16sF3Ibc1xar1zNInAfHEcVtYRg==", + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/mongodb-client-encryption/-/mongodb-client-encryption-6.3.0.tgz", + "integrity": "sha512-OaOg02vglPxxrfY01alC0ER0W4WMuNO2ZJR3ehAUcuGYreJaJ+aX+rUQiQkdQHiXvnVPDUx/4QDr2CR1/FvpcQ==", "dev": true, "hasInstallScript": true, "license": "Apache-2.0", diff --git a/package.json b/package.json index fb7f68574b4..d16c17c46cd 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "mongodb", - "version": "6.13.1", + "version": "6.14.0", "description": "The official MongoDB driver for Node.js", "main": "lib/index.js", "files": [ @@ -30,7 +30,7 @@ "mongodb-connection-string-url": "^3.0.0" }, "peerDependencies": { - "@aws-sdk/credential-providers": "^3.632.0", + "@aws-sdk/credential-providers": "^3.188.0", "@mongodb-js/zstd": "^1.1.0 || ^2.0.0", "gcp-metadata": "^5.2.0", "kerberos": "^2.0.1", @@ -96,7 +96,7 @@ "js-yaml": "^4.1.0", "mocha": "^10.8.2", "mocha-sinon": "^2.1.2", - "mongodb-client-encryption": "^6.2.0", + "mongodb-client-encryption": "^6.3.0", "mongodb-legacy": "^6.1.3", "nyc": "^15.1.0", "prettier": "^3.4.2", diff --git a/src/change_stream.ts b/src/change_stream.ts index 1526ca3c756..7f7551f2d82 100644 --- a/src/change_stream.ts +++ b/src/change_stream.ts @@ -419,6 +419,13 @@ export interface ChangeStreamCreateDocument ChangeStreamDocumentCollectionUUID { /** Describes the type of operation represented in this change notification 
*/ operationType: 'create'; + + /** + * The type of the newly created object. + * + * @sinceServerVersion 8.1.0 + */ + nsType?: 'collection' | 'timeseries' | 'view'; } /** diff --git a/src/client-side-encryption/auto_encrypter.ts index 1d7a9de4c66..a24f8cd6da6 100644 --- a/src/client-side-encryption/auto_encrypter.ts +++ b/src/client-side-encryption/auto_encrypter.ts @@ -239,6 +239,7 @@ export class AutoEncrypter { this._kmsProviders = options.kmsProviders || {}; const mongoCryptOptions: MongoCryptOptions = { + enableMultipleCollinfo: true, cryptoCallbacks }; if (options.schemaMap) { diff --git a/src/client-side-encryption/state_machine.ts index 096c4cfc635..c45fdf1f093 100644 --- a/src/client-side-encryption/state_machine.ts +++ b/src/client-side-encryption/state_machine.ts @@ -16,6 +16,7 @@ import { getSocks, type SocksLib } from '../deps'; import { MongoOperationTimeoutError } from '../error'; import { type MongoClient, type MongoClientOptions } from '../mongo_client'; import { type Abortable } from '../mongo_types'; +import { type CollectionInfo } from '../operations/list_collections'; import { Timeout, type TimeoutContext, TimeoutError } from '../timeout'; import { addAbortListener, @@ -205,11 +206,19 @@ export class StateMachine { const mongocryptdManager = executor._mongocryptdManager; let result: Uint8Array | null = null; - while (context.state !== MONGOCRYPT_CTX_DONE && context.state !== MONGOCRYPT_CTX_ERROR) { + // TypeScript treats getters just like properties: once you've tested one for equality, + // it is assumed not to change. That is exactly the opposite of how we use state and status: + // every call to at least `addMongoOperationResponse` and `finalize` can change the state. + // These wrappers let us write the checks naturally without adding `@ts-expect-error` + // suppressions to condition checks inside the state machine.
+ const getStatus = () => context.status; + const getState = () => context.state; + + while (getState() !== MONGOCRYPT_CTX_DONE && getState() !== MONGOCRYPT_CTX_ERROR) { options.signal?.throwIfAborted(); - debug(`[context#${context.id}] ${stateToString.get(context.state) || context.state}`); + debug(`[context#${context.id}] ${stateToString.get(getState()) || getState()}`); - switch (context.state) { + switch (getState()) { case MONGOCRYPT_CTX_NEED_MONGO_COLLINFO: { const filter = deserialize(context.nextMongoOperation()); if (!metaDataClient) { @@ -218,22 +227,28 @@ export class StateMachine { ); } - const collInfo = await this.fetchCollectionInfo( + const collInfoCursor = this.fetchCollectionInfo( metaDataClient, context.ns, filter, options ); - if (collInfo) { - context.addMongoOperationResponse(collInfo); + + for await (const collInfo of collInfoCursor) { + context.addMongoOperationResponse(serialize(collInfo)); + if (getState() === MONGOCRYPT_CTX_ERROR) break; } + if (getState() === MONGOCRYPT_CTX_ERROR) break; + context.finishMongoOperation(); break; } case MONGOCRYPT_CTX_NEED_MONGO_MARKINGS: { const command = context.nextMongoOperation(); + if (getState() === MONGOCRYPT_CTX_ERROR) break; + if (!mongocryptdClient) { throw new MongoCryptError( 'unreachable state machine state: entered MONGOCRYPT_CTX_NEED_MONGO_MARKINGS but mongocryptdClient is undefined' @@ -283,9 +298,8 @@ export class StateMachine { case MONGOCRYPT_CTX_READY: { const finalizedContext = context.finalize(); - // @ts-expect-error finalize can change the state, check for error - if (context.state === MONGOCRYPT_CTX_ERROR) { - const message = context.status.message || 'Finalization error'; + if (getState() === MONGOCRYPT_CTX_ERROR) { + const message = getStatus().message || 'Finalization error'; throw new MongoCryptError(message); } result = finalizedContext; @@ -293,12 +307,12 @@ export class StateMachine { } default: - throw new MongoCryptError(`Unknown state: ${context.state}`); + throw new MongoCryptError(`Unknown state: ${getState()}`); } } - if (context.state === MONGOCRYPT_CTX_ERROR || result == null) { - const message = context.status.message; + if (getState() === MONGOCRYPT_CTX_ERROR || result == null) { + const message = getStatus().message; if (!message) { debug( `unidentifiable error in MongoCrypt - received an error status from \`libmongocrypt\` but received no error message.` @@ -527,12 +541,12 @@ export class StateMachine { * @param filter - A filter for the listCollections command * @param callback - Invoked with the info of the requested collection, or with an error */ - async fetchCollectionInfo( + fetchCollectionInfo( client: MongoClient, ns: string, filter: Document, options?: { timeoutContext?: TimeoutContext } & Abortable - ): Promise { + ): AsyncIterable { const { db } = MongoDBCollectionNamespace.fromString(ns); const cursor = client.db(db).listCollections(filter, { @@ -540,16 +554,11 @@ export class StateMachine { promoteValues: false, timeoutContext: options?.timeoutContext && new CursorTimeoutContext(options?.timeoutContext, Symbol()), - signal: options?.signal + signal: options?.signal, + nameOnly: false }); - // There is always exactly zero or one matching documents, so this should always exhaust the cursor - // in a single batch. We call `toArray()` just to be safe and ensure that the cursor is always - // exhausted and closed. - const collections = await cursor.toArray(); - - const info = collections.length > 0 ? 
serialize(collections[0]) : null; - return info; + return cursor; } /** diff --git a/src/collection.ts index 19a0682d8b2..a2df98ae2da 100644 --- a/src/collection.ts +++ b/src/collection.ts @@ -966,32 +966,40 @@ export class Collection { /** * Find a document and update it in one atomic operation. Requires a write lock for the duration of the operation. * + * The value of `update` can be either: + * - UpdateFilter - A document that contains update operator expressions, + * - Document[] - an aggregation pipeline consisting of the following stages: + * - $addFields and its alias $set + * - $project and its alias $unset + * - $replaceRoot and its alias $replaceWith. + * See the [findAndModify command documentation](https://www.mongodb.com/docs/manual/reference/command/findAndModify) for details. + * * @param filter - The filter used to select the document to update - * @param update - Update operations to be performed on the document + * @param update - The modifications to apply * @param options - Optional settings for the command */ async findOneAndUpdate( filter: Filter, - update: UpdateFilter, + update: UpdateFilter | Document[], options: FindOneAndUpdateOptions & { includeResultMetadata: true } ): Promise>; async findOneAndUpdate( filter: Filter, - update: UpdateFilter, + update: UpdateFilter | Document[], options: FindOneAndUpdateOptions & { includeResultMetadata: false } ): Promise | null>; async findOneAndUpdate( filter: Filter, - update: UpdateFilter, + update: UpdateFilter | Document[], options: FindOneAndUpdateOptions ): Promise | null>; async findOneAndUpdate( filter: Filter, - update: UpdateFilter + update: UpdateFilter | Document[] ): Promise | null>; async findOneAndUpdate( filter: Filter, - update: UpdateFilter, + update: UpdateFilter | Document[], options?: FindOneAndUpdateOptions ): Promise | ModifyResult | null> { return await executeOperation( diff --git a/src/mongo_logger.ts index 90cfac37153..73d8203ec7a 100644 --- a/src/mongo_logger.ts +++ b/src/mongo_logger.ts @@ -1,5 +1,4 @@ import { inspect, promisify } from 'util'; -import { isUint8Array } from 'util/types'; import { type Binary, @@ -75,7 +74,7 @@ import type { ServerSelectionSucceededEvent, WaitingForSuitableServerEvent } from './sdam/server_selection_events'; -import { HostAddress, isPromiseLike, parseUnsignedInteger } from './utils'; +import { HostAddress, isPromiseLike, isUint8Array, parseUnsignedInteger } from './utils'; /** * @public diff --git a/test/benchmarks/driver_bench/readme.md index 81f8b859745..d660a997672 100644 --- a/test/benchmarks/driver_bench/readme.md +++ b/test/benchmarks/driver_bench/readme.md @@ -51,6 +51,8 @@ type BenchmarkModule = { run: () => Promise; afterEach?: () => Promise; after?: () => Promise; + + tags?: string[]; }; ``` @@ -58,6 +60,21 @@ Just like mocha we have once before and once after as well as before each and af The `driver.mts` module is intended to hold various helpers for setup and teardown and help abstract some of the driver API. +## Benchmark tags +The `tags` property of `BenchmarkModule` is where a benchmark's tags should be added to facilitate +performance alerting and filtering of results via our internal tools. + +Tags are defined in `driver.mts` under the `TAG` enum. +Whenever a new tag is defined it should be documented in the table below.
+ +| tag variable name | tag string value | purpose | +|-------------------|------------------------|--------------------------------------------------------------------------------------------------------------------------------------| +| `TAG.spec` | `'spec-benchmark'` | Special tag that marks a benchmark as a spec-required benchmark | +| `TAG.alert` | `'alerting-benchmark'` | Special tag that enables our perf monitoring tooling to create alerts when regressions in this benchmark's performance are detected | +| `TAG.cursor` | `'cursor-benchmark'` | Tag marking a benchmark as being related to cursor performance | +| `TAG.read` | `'read-benchmark'` | Tag marking a benchmark as being related to read performance | +| `TAG.write` | `'write-benchmark'` | Tag marking a benchmark as being related to write performance | + ## Wishlist - Make it so runner can handle: `./lib/suites/multi_bench/grid_fs_upload.mjs` as an argument so shell path autocomplete makes it easier to pick a benchmark diff --git a/test/benchmarks/driver_bench/src/driver.mts b/test/benchmarks/driver_bench/src/driver.mts index ab172fe7bd6..dffe48408d6 100644 --- a/test/benchmarks/driver_bench/src/driver.mts +++ b/test/benchmarks/driver_bench/src/driver.mts @@ -7,6 +7,20 @@ import process from 'node:process'; const __dirname = import.meta.dirname; const require = module.createRequire(__dirname); +export const TAG = { + // Special tag that marks a benchmark as a spec-required benchmark + spec: 'spec-benchmark', + // Special tag that enables our perf monitoring tooling to create alerts when regressions in this + // benchmark's performance are detected + alert: 'alerting-benchmark', + // Tag marking a benchmark as being related to cursor performance + cursor: 'cursor-benchmark', + // Tag marking a benchmark as being related to read performance + read: 'read-benchmark', + // Tag marking a benchmark as being related to write performance + write: 'write-benchmark' +}; + /** * The path to the MongoDB Node.js driver. * This MUST be set to the directory the driver is installed in @@ -118,19 +132,23 @@ export const PARALLEL_DIRECTORY = path.resolve(SPEC_DIRECTORY, 'parallel'); export const TEMP_DIRECTORY = path.resolve(SPEC_DIRECTORY, 'tmp'); export type Metric = { - name: 'megabytes_per_second'; + name: 'megabytes_per_second' | 'normalized_throughput'; value: number; + metadata: { + improvement_direction: 'up' | 'down'; + }; }; export type MetricInfo = { info: { test_name: string; args: Record; + tags?: string[]; }; metrics: Metric[]; }; -export function metrics(test_name: string, result: number): MetricInfo { +export function metrics(test_name: string, result: number, tags?: string[]): MetricInfo { return { info: { test_name, @@ -141,9 +159,13 @@ export function metrics(test_name: string, result: number): MetricInfo { key, typeof value === 'number' ? value : value ? 
1 : 0 ]) - ) + ), + tags }, - metrics: [{ name: 'megabytes_per_second', value: result }] + // FIXME(NODE-6781): For now all of our metrics are of throughput so their improvement_direction is up, + metrics: [ + { name: 'megabytes_per_second', value: result, metadata: { improvement_direction: 'up' } } + ] } as const; } diff --git a/test/benchmarks/driver_bench/src/main.mts b/test/benchmarks/driver_bench/src/main.mts index 67309b7d49a..04e573b7734 100644 --- a/test/benchmarks/driver_bench/src/main.mts +++ b/test/benchmarks/driver_bench/src/main.mts @@ -85,7 +85,7 @@ console.log(systemInfo()); const runnerPath = path.join(__dirname, 'runner.mjs'); -const results: MetricInfo[] = []; +let results: MetricInfo[] = []; for (const [suite, benchmarks] of Object.entries(tests)) { console.group(snakeToCamel(suite)); @@ -198,6 +198,42 @@ function calculateCompositeBenchmarks(results: MetricInfo[]) { return [...results, ...compositeResults]; } -const finalResults = calculateCompositeBenchmarks(results); +function calculateNormalizedResults(results: MetricInfo[]): MetricInfo[] { + const baselineBench = results.find(r => r.info.test_name === 'cpuBaseline'); + const pingBench = results.find(r => r.info.test_name === 'ping'); + + assert.ok(pingBench, 'ping bench results not found!'); + assert.ok(baselineBench, 'baseline results not found!'); + const pingThroughput = pingBench.metrics[0].value; + const cpuBaseline = baselineBench.metrics[0].value; + + for (const bench of results) { + if (bench.info.test_name === 'cpuBaseline') continue; + if (bench.info.test_name === 'ping') { + bench.metrics.push({ + name: 'normalized_throughput', + value: bench.metrics[0].value / cpuBaseline, + metadata: { + improvement_direction: 'up' + } + }); + } + // Compute normalized_throughput of benchmarks against ping bench + else { + bench.metrics.push({ + name: 'normalized_throughput', + value: bench.metrics[0].value / pingThroughput, + metadata: { + improvement_direction: 'up' + } + }); + } + } + + return results; +} + +results = calculateCompositeBenchmarks(results); +results = calculateNormalizedResults(results); -await fs.writeFile('results.json', JSON.stringify(finalResults, undefined, 2), 'utf8'); +await fs.writeFile('results.json', JSON.stringify(results, undefined, 2), 'utf8'); diff --git a/test/benchmarks/driver_bench/src/runner.mts b/test/benchmarks/driver_bench/src/runner.mts index f360b2ef0ac..b68bc9cfe49 100644 --- a/test/benchmarks/driver_bench/src/runner.mts +++ b/test/benchmarks/driver_bench/src/runner.mts @@ -14,6 +14,7 @@ type BenchmarkModule = { run: () => Promise; afterEach?: () => Promise; after?: () => Promise; + tags?: string[]; }; const benchmarkName = snakeToCamel(path.basename(benchmarkFile, '.mjs')); @@ -80,6 +81,14 @@ function percentileIndex(percentile: number, count: number) { const medianExecution = durations[percentileIndex(50, count)]; const megabytesPerSecond = benchmark.taskSize / medianExecution; +const tags = benchmark.tags; +if ( + tags && + (!Array.isArray(tags) || (tags.length > 0 && !tags.every(t => typeof t === 'string'))) +) { + throw new Error('If tags is specified, it MUST be an array of strings'); +} + console.log( ' '.repeat(3), ...['total time:', totalDuration, 'sec,'], @@ -91,6 +100,6 @@ console.log( await fs.writeFile( `results_${path.basename(benchmarkFile, '.mjs')}.json`, - JSON.stringify(metrics(benchmarkName, megabytesPerSecond), undefined, 2) + '\n', + JSON.stringify(metrics(benchmarkName, megabytesPerSecond, tags), undefined, 2) + '\n', 'utf8' ); diff --git 
a/test/benchmarks/driver_bench/src/suites/multi_bench/find_many_and_empty_cursor.mts b/test/benchmarks/driver_bench/src/suites/multi_bench/find_many_and_empty_cursor.mts index 09c4cadd97a..deb745ea646 100644 --- a/test/benchmarks/driver_bench/src/suites/multi_bench/find_many_and_empty_cursor.mts +++ b/test/benchmarks/driver_bench/src/suites/multi_bench/find_many_and_empty_cursor.mts @@ -1,8 +1,10 @@ /* eslint-disable @typescript-eslint/no-unused-vars */ -import { driver, type mongodb } from '../../driver.mjs'; +import { driver, type mongodb, TAG } from '../../driver.mjs'; export const taskSize = 16.22; +export const tags = [TAG.alert, TAG.spec, TAG.cursor, TAG.read]; + let collection: mongodb.Collection; export async function before() { diff --git a/test/benchmarks/driver_bench/src/suites/multi_bench/grid_fs_download.mts b/test/benchmarks/driver_bench/src/suites/multi_bench/grid_fs_download.mts index 31b85447858..67b1c802517 100644 --- a/test/benchmarks/driver_bench/src/suites/multi_bench/grid_fs_download.mts +++ b/test/benchmarks/driver_bench/src/suites/multi_bench/grid_fs_download.mts @@ -1,10 +1,12 @@ import { Readable, Writable } from 'node:stream'; import { pipeline } from 'node:stream/promises'; -import { driver, type mongodb } from '../../driver.mjs'; +import { driver, type mongodb, TAG } from '../../driver.mjs'; export const taskSize = 52.43; +export const tags = [TAG.alert, TAG.spec, TAG.cursor, TAG.read]; + let bucket: mongodb.GridFSBucket; let bin: Uint8Array; let _id: mongodb.ObjectId; diff --git a/test/benchmarks/driver_bench/src/suites/multi_bench/grid_fs_upload.mts b/test/benchmarks/driver_bench/src/suites/multi_bench/grid_fs_upload.mts index 86361639d3c..002c530ec01 100644 --- a/test/benchmarks/driver_bench/src/suites/multi_bench/grid_fs_upload.mts +++ b/test/benchmarks/driver_bench/src/suites/multi_bench/grid_fs_upload.mts @@ -1,9 +1,10 @@ import { Readable } from 'node:stream'; import { pipeline } from 'node:stream/promises'; -import { driver, type mongodb } from '../../driver.mjs'; +import { driver, type mongodb, TAG } from '../../driver.mjs'; export const taskSize = 52.43; +export const tags = [TAG.alert, TAG.spec, TAG.write]; let bucket: mongodb.GridFSBucket; let uploadStream: mongodb.GridFSBucketWriteStream; diff --git a/test/benchmarks/driver_bench/src/suites/multi_bench/large_doc_bulk_insert.mts b/test/benchmarks/driver_bench/src/suites/multi_bench/large_doc_bulk_insert.mts index 5b913aefe6c..6ca9edaaf64 100644 --- a/test/benchmarks/driver_bench/src/suites/multi_bench/large_doc_bulk_insert.mts +++ b/test/benchmarks/driver_bench/src/suites/multi_bench/large_doc_bulk_insert.mts @@ -1,6 +1,7 @@ -import { driver, type mongodb } from '../../driver.mjs'; +import { driver, type mongodb, TAG } from '../../driver.mjs'; export const taskSize = 27.31; +export const tags = [TAG.alert, TAG.spec, TAG.write]; let collection: mongodb.Collection; let documents: any[]; diff --git a/test/benchmarks/driver_bench/src/suites/multi_bench/small_doc_bulk_insert.mts b/test/benchmarks/driver_bench/src/suites/multi_bench/small_doc_bulk_insert.mts index 922002f49b4..0047731fe1b 100644 --- a/test/benchmarks/driver_bench/src/suites/multi_bench/small_doc_bulk_insert.mts +++ b/test/benchmarks/driver_bench/src/suites/multi_bench/small_doc_bulk_insert.mts @@ -1,6 +1,7 @@ -import { driver, type mongodb } from '../../driver.mjs'; +import { driver, type mongodb, TAG } from '../../driver.mjs'; export const taskSize = 2.75; +export const tags = [TAG.alert, TAG.spec, TAG.write]; let collection: 
mongodb.Collection; let documents: any[]; diff --git a/test/benchmarks/driver_bench/src/suites/node_specific/aggregate_a_million_documents_and_to_array.mts b/test/benchmarks/driver_bench/src/suites/node_specific/aggregate_a_million_documents_and_to_array.mts index 9a9e3f92fdd..87f24ef3fee 100644 --- a/test/benchmarks/driver_bench/src/suites/node_specific/aggregate_a_million_documents_and_to_array.mts +++ b/test/benchmarks/driver_bench/src/suites/node_specific/aggregate_a_million_documents_and_to_array.mts @@ -1,6 +1,7 @@ -import { driver, type mongodb } from '../../driver.mjs'; +import { driver, type mongodb, TAG } from '../../driver.mjs'; export const taskSize = 16; +export const tags = [TAG.alert, TAG.cursor, TAG.read]; let db: mongodb.Db; diff --git a/test/benchmarks/driver_bench/src/suites/node_specific/aggregate_a_million_tweets_and_to_array.mts b/test/benchmarks/driver_bench/src/suites/node_specific/aggregate_a_million_tweets_and_to_array.mts index da2ba5cc416..4432f40a4ff 100644 --- a/test/benchmarks/driver_bench/src/suites/node_specific/aggregate_a_million_tweets_and_to_array.mts +++ b/test/benchmarks/driver_bench/src/suites/node_specific/aggregate_a_million_tweets_and_to_array.mts @@ -1,6 +1,7 @@ -import { driver, type mongodb } from '../../driver.mjs'; +import { driver, type mongodb, TAG } from '../../driver.mjs'; export const taskSize = 1500; +export const tags = [TAG.alert, TAG.cursor, TAG.read]; let db: mongodb.Db; let tweet: Record; diff --git a/test/benchmarks/driver_bench/src/suites/node_specific/primes.mts b/test/benchmarks/driver_bench/src/suites/node_specific/cpu_baseline.mts similarity index 84% rename from test/benchmarks/driver_bench/src/suites/node_specific/primes.mts rename to test/benchmarks/driver_bench/src/suites/node_specific/cpu_baseline.mts index 6b5906408cc..9760f90921b 100644 --- a/test/benchmarks/driver_bench/src/suites/node_specific/primes.mts +++ b/test/benchmarks/driver_bench/src/suites/node_specific/cpu_baseline.mts @@ -6,12 +6,9 @@ const expectedPrimes = 78_498; // byteLength of // BSON.serialize({ primes: Buffer.from(new Int32Array(sieveOfEratosthenes(findPrimesBelow)).buffer) }).byteLength) // a bin data of int32s -const byteLength = 314_010; - -export const taskSize = 3.1401000000000003; // ~3MB worth of work - -assert.equal(taskSize, byteLength * 10e-6); // taskSize should stay hardcoded, checking here the math is done right. 
+const stableRegionMean = 42.82; +export const taskSize = 3.1401000000000003 / stableRegionMean; // ~3MB worth of work scaled down by the mean of the current stable region in CI to bring this value to roughly 1 /** @see https://en.wikipedia.org/wiki/Sieve_of_Eratosthenes */ export function sieveOfEratosthenes(n: number) { // Create a boolean array "prime[0..n]" and initialize diff --git a/test/benchmarks/driver_bench/src/suites/node_specific/find_many_and_to_array.mts b/test/benchmarks/driver_bench/src/suites/node_specific/find_many_and_to_array.mts index 778d44c3a3d..f435995ea98 100644 --- a/test/benchmarks/driver_bench/src/suites/node_specific/find_many_and_to_array.mts +++ b/test/benchmarks/driver_bench/src/suites/node_specific/find_many_and_to_array.mts @@ -1,7 +1,9 @@ -import { driver, type mongodb } from '../../driver.mjs'; +import { driver, type mongodb, TAG } from '../../driver.mjs'; export const taskSize = 16.22; +export const tags = [TAG.alert, TAG.spec, TAG.cursor, TAG.read]; + let collection: mongodb.Collection; export async function before() { diff --git a/test/benchmarks/driver_bench/src/suites/node_specific/ping.mts b/test/benchmarks/driver_bench/src/suites/node_specific/ping.mts index ac8e462381a..2f95eea344d 100644 --- a/test/benchmarks/driver_bench/src/suites/node_specific/ping.mts +++ b/test/benchmarks/driver_bench/src/suites/node_specific/ping.mts @@ -1,7 +1,8 @@ -import { driver, type mongodb } from '../../driver.mjs'; +import { driver, type mongodb, TAG } from '../../driver.mjs'; // { ping: 1 } is 15 bytes of BSON x 10,000 iterations export const taskSize = 0.15; +export const tags = [TAG.alert]; let db: mongodb.Db; diff --git a/test/benchmarks/driver_bench/src/suites/parallel_bench/gridfs_multi_file_download.mts b/test/benchmarks/driver_bench/src/suites/parallel_bench/gridfs_multi_file_download.mts index 9345826a604..656919ed9b2 100644 --- a/test/benchmarks/driver_bench/src/suites/parallel_bench/gridfs_multi_file_download.mts +++ b/test/benchmarks/driver_bench/src/suites/parallel_bench/gridfs_multi_file_download.mts @@ -2,9 +2,10 @@ import { createReadStream, createWriteStream, promises as fs } from 'node:fs'; import path from 'node:path'; import { pipeline } from 'node:stream/promises'; -import { driver, type mongodb, PARALLEL_DIRECTORY, TEMP_DIRECTORY } from '../../driver.mjs'; +import { driver, type mongodb, PARALLEL_DIRECTORY, TAG, TEMP_DIRECTORY } from '../../driver.mjs'; export const taskSize = 262.144; +export const tags = [TAG.spec, TAG.alert, TAG.read, TAG.cursor]; let bucket: mongodb.GridFSBucket; diff --git a/test/benchmarks/driver_bench/src/suites/parallel_bench/gridfs_multi_file_upload.mts b/test/benchmarks/driver_bench/src/suites/parallel_bench/gridfs_multi_file_upload.mts index 6c29c18ea15..0942f6bc414 100644 --- a/test/benchmarks/driver_bench/src/suites/parallel_bench/gridfs_multi_file_upload.mts +++ b/test/benchmarks/driver_bench/src/suites/parallel_bench/gridfs_multi_file_upload.mts @@ -3,9 +3,10 @@ import path from 'node:path'; import { Readable } from 'node:stream'; import { pipeline } from 'node:stream/promises'; -import { driver, type mongodb, PARALLEL_DIRECTORY } from '../../driver.mjs'; +import { driver, type mongodb, PARALLEL_DIRECTORY, TAG } from '../../driver.mjs'; export const taskSize = 262.144; +export const tags = [TAG.spec, TAG.alert, TAG.write]; let bucket: mongodb.GridFSBucket; diff --git a/test/benchmarks/driver_bench/src/suites/parallel_bench/ldjson_multi_file_export.mts 
b/test/benchmarks/driver_bench/src/suites/parallel_bench/ldjson_multi_file_export.mts index 81d05dcb526..62fef2aa600 100644 --- a/test/benchmarks/driver_bench/src/suites/parallel_bench/ldjson_multi_file_export.mts +++ b/test/benchmarks/driver_bench/src/suites/parallel_bench/ldjson_multi_file_export.mts @@ -3,9 +3,17 @@ import path from 'node:path'; import readline from 'node:readline/promises'; import stream from 'node:stream/promises'; -import { driver, EJSON, type mongodb, PARALLEL_DIRECTORY, TEMP_DIRECTORY } from '../../driver.mjs'; +import { + driver, + EJSON, + type mongodb, + PARALLEL_DIRECTORY, + TAG, + TEMP_DIRECTORY +} from '../../driver.mjs'; export const taskSize = 565; +export const tags = [TAG.spec, TAG.alert, TAG.write]; let collection: mongodb.Collection; diff --git a/test/benchmarks/driver_bench/src/suites/parallel_bench/ldjson_multi_file_upload.mts b/test/benchmarks/driver_bench/src/suites/parallel_bench/ldjson_multi_file_upload.mts index 5f5dab24f90..46a53823924 100644 --- a/test/benchmarks/driver_bench/src/suites/parallel_bench/ldjson_multi_file_upload.mts +++ b/test/benchmarks/driver_bench/src/suites/parallel_bench/ldjson_multi_file_upload.mts @@ -2,9 +2,10 @@ import { createReadStream, promises as fs } from 'node:fs'; import path from 'node:path'; import readline from 'node:readline/promises'; -import { driver, type mongodb, PARALLEL_DIRECTORY } from '../../driver.mjs'; +import { driver, type mongodb, PARALLEL_DIRECTORY, TAG } from '../../driver.mjs'; export const taskSize = 565; +export const tags = [TAG.spec, TAG.alert, TAG.write]; const directory = path.resolve(PARALLEL_DIRECTORY, 'ldjson_multi'); let collection: mongodb.Collection; diff --git a/test/benchmarks/driver_bench/src/suites/single_bench/find_one.mts b/test/benchmarks/driver_bench/src/suites/single_bench/find_one.mts index d99ac5f9bfe..e079c723c3e 100644 --- a/test/benchmarks/driver_bench/src/suites/single_bench/find_one.mts +++ b/test/benchmarks/driver_bench/src/suites/single_bench/find_one.mts @@ -1,6 +1,7 @@ -import { driver, type mongodb } from '../../driver.mjs'; +import { driver, type mongodb, TAG } from '../../driver.mjs'; export const taskSize = 16.22; +export const tags = [TAG.spec, TAG.alert, TAG.cursor, TAG.read]; let collection: mongodb.Collection<{ _id: number }>; diff --git a/test/benchmarks/driver_bench/src/suites/single_bench/large_doc_insert_one.mts b/test/benchmarks/driver_bench/src/suites/single_bench/large_doc_insert_one.mts index 4dcae894903..70cf758888b 100644 --- a/test/benchmarks/driver_bench/src/suites/single_bench/large_doc_insert_one.mts +++ b/test/benchmarks/driver_bench/src/suites/single_bench/large_doc_insert_one.mts @@ -1,6 +1,7 @@ -import { driver, type mongodb } from '../../driver.mjs'; +import { driver, type mongodb, TAG } from '../../driver.mjs'; export const taskSize = 27.31; +export const tags = [TAG.spec, TAG.alert, TAG.write]; let collection: mongodb.Collection; let documents: Record[]; diff --git a/test/benchmarks/driver_bench/src/suites/single_bench/run_command.mts b/test/benchmarks/driver_bench/src/suites/single_bench/run_command.mts index a2884ebb343..0a55c4fae37 100644 --- a/test/benchmarks/driver_bench/src/suites/single_bench/run_command.mts +++ b/test/benchmarks/driver_bench/src/suites/single_bench/run_command.mts @@ -1,7 +1,8 @@ -import { driver, type mongodb } from '../../driver.mjs'; +import { driver, type mongodb, TAG } from '../../driver.mjs'; // { hello: true } is 13 bytes of BSON x 10,000 iterations export const taskSize = 0.13; +export const tags = 
[TAG.spec, TAG.alert]; let db: mongodb.Db; diff --git a/test/benchmarks/driver_bench/src/suites/single_bench/small_doc_insert_one.mts b/test/benchmarks/driver_bench/src/suites/single_bench/small_doc_insert_one.mts index 4995a9b2539..070b0fff7e5 100644 --- a/test/benchmarks/driver_bench/src/suites/single_bench/small_doc_insert_one.mts +++ b/test/benchmarks/driver_bench/src/suites/single_bench/small_doc_insert_one.mts @@ -1,6 +1,7 @@ -import { driver, type mongodb } from '../../driver.mjs'; +import { driver, type mongodb, TAG } from '../../driver.mjs'; export const taskSize = 2.75; +export const tags = [TAG.spec, TAG.alert, TAG.write]; let collection: mongodb.Collection; let documents: Record[]; diff --git a/test/integration/auth/auth.prose.test.ts b/test/integration/auth/auth.prose.test.ts index 0ce50a06805..3bf068dbb91 100644 --- a/test/integration/auth/auth.prose.test.ts +++ b/test/integration/auth/auth.prose.test.ts @@ -305,7 +305,8 @@ describe('Authentication Spec Prose Tests', function () { ); }); - describe('Step 4', function () { + // TODO(NODE-6752): Fix flaky SCRAM-SHA-256 tests + describe.skip('Step 4', function () { /** * Step 4 * To test SASLprep behavior, create two users: @@ -376,8 +377,7 @@ describe('Authentication Spec Prose Tests', function () { expect(stats).to.exist; }); - // TODO(NODE-6752): Fix flaky SCRAM-SHA-256 test - it.skip( + it( 'logs in with non-normalized username and normalized password', metadata, async function () { diff --git a/test/integration/client-side-encryption/client_side_encryption.prose.25.lookup.test.ts b/test/integration/client-side-encryption/client_side_encryption.prose.25.lookup.test.ts new file mode 100644 index 00000000000..d4365ac6d6d --- /dev/null +++ b/test/integration/client-side-encryption/client_side_encryption.prose.25.lookup.test.ts @@ -0,0 +1,432 @@ +import * as fs from 'node:fs/promises'; +import * as path from 'node:path'; + +import { expect } from 'chai'; +import { type MongoCryptOptions } from 'mongodb-client-encryption'; +import * as sinon from 'sinon'; + +import { getCSFLEKMSProviders } from '../../csfle-kms-providers'; +import { AutoEncrypter, BSON, type Document, type MongoClient } from '../../mongodb'; +import { type TestConfiguration } from '../../tools/runner/config'; +import { getEncryptExtraOptions } from '../../tools/utils'; + +const defaultMetadata: MongoDBMetadataUI = { + requires: { + topology: '!single', + clientSideEncryption: '>=6.3.0', + mongodb: '>=7.0.0' + } +}; + +const readFixture = async (name: string) => + BSON.EJSON.parse( + await fs.readFile( + path.resolve(__dirname, `../../spec/client-side-encryption/etc/data/lookup/${name}`), + 'utf8' + ) + ); + +const newEncryptedClient = ({ configuration }: { configuration: TestConfiguration }) => + configuration.newClient( + {}, + { + writeConcern: { w: 'majority' }, + autoEncryption: { + keyVaultNamespace: 'db.keyvault', + kmsProviders: { local: getCSFLEKMSProviders().local }, + extraOptions: getEncryptExtraOptions() + } + } + ); + +describe('$lookup support', defaultMetadata, function () { + before(async function () { + const mochaTest = { metadata: defaultMetadata }; + + if (!this.configuration.filters.MongoDBVersionFilter.filter(mochaTest)) { + return; + } + + if (!this.configuration.filters.MongoDBTopologyFilter.filter(mochaTest)) { + return; + } + + if (!this.configuration.filters.ClientSideEncryptionFilter.filter(mochaTest)) { + return; + } + + let unencryptedClient: MongoClient, encryptedClient: MongoClient; + try { + /** + * Create an encrypted MongoClient 
configured with: + * + * ```txt + * AutoEncryptionOpts( + * keyVaultNamespace="db.keyvault", + * kmsProviders={"local": { "key": "" }} + * ) + * ``` + */ + encryptedClient = newEncryptedClient(this); + + /** Drop database db. */ + await encryptedClient.db('db').dropDatabase(); + + /** Insert `key-doc.json` into db.keyvault. */ + const keyDoc = await readFixture('key-doc.json'); + await encryptedClient.db('db').collection('keyvault').insertOne(keyDoc); + + /** + * Create the following collections: + * ``` + * db.csfle with options: { "validator": { "$jsonSchema": ""}}. + * db.csfle2 with options: { "validator": { "$jsonSchema": ""}}. + * db.qe with options: { "encryptedFields": ""}. + * db.qe2 with options: { "encryptedFields": ""}. + * db.no_schema with no options. + * db.no_schema2 with no options. + * ``` + */ + const collections = [ + { + name: 'csfle', + options: { validator: { $jsonSchema: await readFixture('schema-csfle.json') } }, + document: { csfle: 'csfle' } + }, + { + name: 'csfle2', + options: { validator: { $jsonSchema: await readFixture('schema-csfle2.json') } }, + document: { csfle2: 'csfle2' } + }, + { + name: 'qe', + options: { encryptedFields: await readFixture('schema-qe.json') }, + document: { qe: 'qe' } + }, + { + name: 'qe2', + options: { encryptedFields: await readFixture('schema-qe2.json') }, + document: { qe2: 'qe2' } + }, + { + name: 'no_schema', + options: {}, + document: { no_schema: 'no_schema' } + }, + { + name: 'no_schema2', + options: {}, + document: { no_schema2: 'no_schema2' } + } + ]; + + for (const { name, options } of collections) { + await encryptedClient.db('db').createCollection(name, options); + } + + /** Create an unencrypted MongoClient. */ + unencryptedClient = this.configuration.newClient({}, { writeConcern: { w: 'majority' } }); + + /** + * ``` + * {"csfle": "csfle"} into db.csfle + * Use the unencrypted client to retrieve it. Assert the csfle field is BSON binary. + * {"csfle2": "csfle2"} into db.csfle2 + * Use the unencrypted client to retrieve it. Assert the csfle2 field is BSON binary. + * {"qe": "qe"} into db.qe + * Use the unencrypted client to retrieve it. Assert the qe field is BSON binary. + * {"qe2": "qe2"} into db.qe2 + * Use the unencrypted client to retrieve it. Assert the qe2 field is BSON binary. + * {"no_schema": "no_schema"} into db.no_schema + * {"no_schema2": "no_schema2"} into db.no_schema2 + * ``` + */ + for (const { name, document } of collections) { + const { insertedId } = await encryptedClient.db('db').collection(name).insertOne(document); + + if (name.startsWith('no_')) continue; + + expect(await unencryptedClient.db('db').collection(name).findOne(insertedId)) + .to.have.property(Object.keys(document)[0]) + .that.has.property('_bsontype', 'Binary'); + } + } finally { + await unencryptedClient?.close(); + await encryptedClient?.close(); + } + }); + + const test = function ( + title: string, + collName: string, + pipeline: Document[], + expected: Document | RegExp, + metadata?: MongoDBMetadataUI + ) { + describe(title.slice(0, title.indexOf(':')), function () { + let client: MongoClient; + + beforeEach(async function () { + client = newEncryptedClient(this); + }); + + afterEach(async function () { + await client.close(); + }); + + it(title.slice(title.indexOf(':') + 1).trim(), metadata ?? 
defaultMetadata, async () => { + const collection = client.db('db').collection(collName); + const actual = await collection + .aggregate(pipeline) + .toArray() + .catch(error => error); + + const expectedError = expected instanceof RegExp; + + if (expectedError) { + expect(actual).to.be.instanceOf(Error); + if (!expected.test(actual.message)) { + throw actual; + } + } else if (actual instanceof Error) { + throw actual; + } else { + expect(actual).to.have.lengthOf(1); + expect(actual[0]).to.deep.equal(expected); + } + }); + }); + }; + + test( + 'Case 1: db.csfle joins db.no_schema', + 'csfle', + [ + { $match: { csfle: 'csfle' } }, + { + $lookup: { + from: 'no_schema', + as: 'matched', + pipeline: [{ $match: { no_schema: 'no_schema' } }, { $project: { _id: 0 } }] + } + }, + { $project: { _id: 0 } } + ], + { csfle: 'csfle', matched: [{ no_schema: 'no_schema' }] }, + { requires: { ...defaultMetadata.requires, mongodb: '>=8.1.0' } } + ); + + test( + 'Case 2: db.qe joins db.no_schema', + 'qe', + [ + { $match: { qe: 'qe' } }, + { + $lookup: { + from: 'no_schema', + as: 'matched', + pipeline: [ + { $match: { no_schema: 'no_schema' } }, + { $project: { _id: 0, __safeContent__: 0 } } + ] + } + }, + { $project: { _id: 0, __safeContent__: 0 } } + ], + { qe: 'qe', matched: [{ no_schema: 'no_schema' }] }, + { requires: { ...defaultMetadata.requires, mongodb: '>=8.1.0' } } + ); + + test( + 'Case 3: db.no_schema joins db.csfle', + 'no_schema', + [ + { $match: { no_schema: 'no_schema' } }, + { + $lookup: { + from: 'csfle', + as: 'matched', + pipeline: [{ $match: { csfle: 'csfle' } }, { $project: { _id: 0 } }] + } + }, + { $project: { _id: 0 } } + ], + { no_schema: 'no_schema', matched: [{ csfle: 'csfle' }] }, + { requires: { ...defaultMetadata.requires, mongodb: '>=8.1.0' } } + ); + + test( + 'Case 4: db.no_schema joins db.qe', + 'no_schema', + [ + { $match: { no_schema: 'no_schema' } }, + { + $lookup: { + from: 'qe', + as: 'matched', + pipeline: [{ $match: { qe: 'qe' } }, { $project: { _id: 0, __safeContent__: 0 } }] + } + }, + { $project: { _id: 0 } } + ], + { no_schema: 'no_schema', matched: [{ qe: 'qe' }] }, + { requires: { ...defaultMetadata.requires, mongodb: '>=8.1.0' } } + ); + + test( + 'Case 5: db.csfle joins db.csfle2', + 'csfle', + [ + { $match: { csfle: 'csfle' } }, + { + $lookup: { + from: 'csfle2', + as: 'matched', + pipeline: [{ $match: { csfle2: 'csfle2' } }, { $project: { _id: 0 } }] + } + }, + { $project: { _id: 0 } } + ], + { csfle: 'csfle', matched: [{ csfle2: 'csfle2' }] }, + { requires: { ...defaultMetadata.requires, mongodb: '>=8.1.0' } } + ); + + test( + 'Case 6: db.qe joins db.qe2', + 'qe', + [ + { $match: { qe: 'qe' } }, + { + $lookup: { + from: 'qe2', + as: 'matched', + pipeline: [{ $match: { qe2: 'qe2' } }, { $project: { _id: 0, __safeContent__: 0 } }] + } + }, + { $project: { _id: 0, __safeContent__: 0 } } + ], + { qe: 'qe', matched: [{ qe2: 'qe2' }] }, + { requires: { ...defaultMetadata.requires, mongodb: '>=8.1.0' } } + ); + + test( + 'Case 7: db.no_schema joins db.no_schema2', + 'no_schema', + [ + { $match: { no_schema: 'no_schema' } }, + { + $lookup: { + from: 'no_schema2', + as: 'matched', + pipeline: [{ $match: { no_schema2: 'no_schema2' } }, { $project: { _id: 0 } }] + } + }, + { $project: { _id: 0 } } + ], + { no_schema: 'no_schema', matched: [{ no_schema2: 'no_schema2' }] }, + { requires: { ...defaultMetadata.requires, mongodb: '>=8.1.0' } } + ); + + test( + 'Case 8: db.csfle joins db.qe', + 'csfle', + [ + { $match: { csfle: 'qe' } }, + { + $lookup: { + from: 'qe', + 
as: 'matched', + pipeline: [{ $match: { qe: 'qe' } }, { $project: { _id: 0 } }] + } + }, + { $project: { _id: 0 } } + ], + /not supported/i, + { requires: { ...defaultMetadata.requires, mongodb: '>=8.1.0' } } + ); + + test( + 'Case 9: test error with <8.1', + 'csfle', + [ + { $match: { csfle: 'csfle' } }, + { + $lookup: { + from: 'no_schema', + as: 'matched', + pipeline: [{ $match: { no_schema: 'no_schema' } }, { $project: { _id: 0 } }] + } + }, + { $project: { _id: 0 } } + ], + /Upgrade/i, + { requires: { ...defaultMetadata.requires, mongodb: '>=7.0.0 <8.1.0' } } + ); + + describe('Node.js custom test', function () { + describe('when enableMultipleCollinfo is off and a $lookup is run', function () { + let client: MongoClient; + + beforeEach(async function () { + const mochaTest = { metadata: defaultMetadata }; + + if (!this.configuration.filters.MongoDBVersionFilter.filter(mochaTest)) { + return; + } + + if (!this.configuration.filters.MongoDBTopologyFilter.filter(mochaTest)) { + return; + } + + if (!this.configuration.filters.ClientSideEncryptionFilter.filter(mochaTest)) { + return; + } + + const getMongoCrypt = sinon.stub(AutoEncrypter, 'getMongoCrypt').callsFake(function () { + const MongoCrypt = getMongoCrypt.wrappedMethod.call(this); + return class extends MongoCrypt { + constructor(options: MongoCryptOptions) { + expect(options).to.have.property('enableMultipleCollinfo', true); // assert invariant + options.enableMultipleCollinfo = false; + super(options); + } + }; + }); + + client = newEncryptedClient(this); + }); + + afterEach(async function () { + sinon.restore(); + await client?.close(); + }); + + it( + 'throws a TypeError about libmongocrypt not enabled to support multiple collections', + defaultMetadata, + async () => { + const collection = client.db('db').collection('csfle'); + const actual = await collection + .aggregate([ + { $match: { csfle: 'csfle' } }, + { + $lookup: { + from: 'csfle2', + as: 'matched', + pipeline: [{ $match: { csfle2: 'csfle2' } }, { $project: { _id: 0 } }] + } + }, + { $project: { _id: 0 } } + ]) + .toArray() + .catch(error => error); + + expect(actual).to.be.instanceOf(TypeError); + expect(actual.message).to.match( + /libmongocrypt is not configured to support encrypting a command with multiple collections/i + ); + } + ); + }); + }); +}); diff --git a/test/integration/client-side-encryption/client_side_encryption.prose.test.js b/test/integration/client-side-encryption/client_side_encryption.prose.test.js index d067bf1804c..0e773654ec7 100644 --- a/test/integration/client-side-encryption/client_side_encryption.prose.test.js +++ b/test/integration/client-side-encryption/client_side_encryption.prose.test.js @@ -16,9 +16,6 @@ const { } = require('../../spec/client-side-encryption/external/external-schema.json'); /* eslint-disable no-restricted-modules */ const { ClientEncryption } = require('../../../src/client-side-encryption/client_encryption'); -const { - ClientSideEncryptionFilter -} = require('../../tools/runner/filters/client_encryption_filter'); const { getCSFLEKMSProviders } = require('../../csfle-kms-providers'); const { AlpineTestConfiguration } = require('../../tools/runner/config'); @@ -1146,7 +1143,8 @@ describe('Client Side Encryption Prose Tests', metadata, function () { mongocryptdSpawnArgs: [ '--pidfilepath=bypass-spawning-mongocryptd.pid', '--port=27021' - ] + ], + cryptSharedLibSearchPaths: [] } } } @@ -1393,13 +1391,13 @@ describe('Client Side Encryption Prose Tests', metadata, function () { }; const clientNoTlsOptions = { 
keyVaultNamespace, - kmsProviders: getKmsProviders(null, null, '127.0.0.1:8002', '127.0.0.1:8002'), + kmsProviders: getKmsProviders(null, null, '127.0.0.1:9002', '127.0.0.1:9002'), tlsOptions: tlsCaOptions, extraOptions: getEncryptExtraOptions() }; const clientWithTlsOptions = { keyVaultNamespace, - kmsProviders: getKmsProviders(null, null, '127.0.0.1:8002', '127.0.0.1:8002'), + kmsProviders: getKmsProviders(null, null, '127.0.0.1:9002', '127.0.0.1:9002'), tlsOptions: { aws: { tlsCAFile: process.env.CSFLE_TLS_CA_FILE, @@ -1422,13 +1420,13 @@ describe('Client Side Encryption Prose Tests', metadata, function () { }; const clientWithTlsExpiredOptions = { keyVaultNamespace, - kmsProviders: getKmsProviders(null, '127.0.0.1:8000', '127.0.0.1:8000', '127.0.0.1:8000'), + kmsProviders: getKmsProviders(null, '127.0.0.1:9000', '127.0.0.1:9000', '127.0.0.1:9000'), tlsOptions: tlsCaOptions, extraOptions: getEncryptExtraOptions() }; const clientWithInvalidHostnameOptions = { keyVaultNamespace, - kmsProviders: getKmsProviders(null, '127.0.0.1:8001', '127.0.0.1:8001', '127.0.0.1:8001'), + kmsProviders: getKmsProviders(null, '127.0.0.1:9001', '127.0.0.1:9001', '127.0.0.1:9001'), tlsOptions: tlsCaOptions, extraOptions: getEncryptExtraOptions() }; @@ -1505,10 +1503,10 @@ describe('Client Side Encryption Prose Tests', metadata, function () { const masterKey = { region: 'us-east-1', key: 'arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0', - endpoint: '127.0.0.1:8002' + endpoint: '127.0.0.1:9002' }; - const masterKeyExpired = { ...masterKey, endpoint: '127.0.0.1:8000' }; - const masterKeyInvalidHostname = { ...masterKey, endpoint: '127.0.0.1:8001' }; + const masterKeyExpired = { ...masterKey, endpoint: '127.0.0.1:9000' }; + const masterKeyInvalidHostname = { ...masterKey, endpoint: '127.0.0.1:9001' }; it('should fail with no TLS', metadata, async function () { try { @@ -1702,9 +1700,7 @@ describe('Client Side Encryption Prose Tests', metadata, function () { context('Case 6: named KMS providers apply TLS options', function () { afterEach(() => keyvaultClient?.close()); beforeEach(async function () { - const filter = new ClientSideEncryptionFilter(); - await filter.initializeFilter({}, {}); - const shouldSkip = filter.filter({ + const shouldSkip = this.configuration.filters.ClientSideEncryptionFilter.filter({ metadata: { requires: { // 6.0.1 includes libmongocrypt 1.10. 
@@ -1729,12 +1725,12 @@ describe('Client Side Encryption Prose Tests', metadata, function () { tenantId: providers.azure.tenantId, clientId: providers.azure.clientId, clientSecret: providers.azure.clientId, - identityPlatformEndpoint: '127.0.0.1:8002' + identityPlatformEndpoint: '127.0.0.1:9002' }, 'gcp:no_client_cert': { email: providers.gcp.email, privateKey: providers.gcp.privateKey, - endpoint: '127.0.0.1:8002' + endpoint: '127.0.0.1:9002' }, 'kmip:no_client_cert': { endpoint: '127.0.0.1:5698' @@ -1747,12 +1743,12 @@ describe('Client Side Encryption Prose Tests', metadata, function () { tenantId: providers.azure.tenantId, clientId: providers.azure.clientId, clientSecret: providers.azure.clientId, - identityPlatformEndpoint: '127.0.0.1:8002' + identityPlatformEndpoint: '127.0.0.1:9002' }, 'gcp:with_tls': { email: providers.gcp.email, privateKey: providers.gcp.privateKey, - endpoint: '127.0.0.1:8002' + endpoint: '127.0.0.1:9002' }, 'kmip:with_tls': { endpoint: '127.0.0.1:5698' @@ -1799,7 +1795,7 @@ describe('Client Side Encryption Prose Tests', metadata, function () { masterKey: { region: 'us-east-1', key: 'arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0', - endpoint: '127.0.0.1:8002' + endpoint: '127.0.0.1:9002' } }) .catch(e => e); @@ -1815,7 +1811,7 @@ describe('Client Side Encryption Prose Tests', metadata, function () { masterKey: { region: 'us-east-1', key: 'arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0', - endpoint: '127.0.0.1:8002' + endpoint: '127.0.0.1:9002' } }) .catch(e => e); diff --git a/test/integration/client-side-encryption/client_side_encryption.spec.test.ts b/test/integration/client-side-encryption/client_side_encryption.spec.test.ts index 36b20f4460d..58fe5bb19bc 100644 --- a/test/integration/client-side-encryption/client_side_encryption.spec.test.ts +++ b/test/integration/client-side-encryption/client_side_encryption.spec.test.ts @@ -1,7 +1,6 @@ import * as path from 'path'; import { loadSpecTests } from '../../spec'; -import { ClientSideEncryptionFilter } from '../../tools/runner/filters/client_encryption_filter'; import { gatherTestSuites, generateTopologyTests, @@ -60,8 +59,6 @@ const SKIPPED_TESTS = new Set([ const isServerless = !!process.env.SERVERLESS; -const filter = new ClientSideEncryptionFilter(); - describe('Client Side Encryption (Legacy)', function () { const testContext = new TestRunnerContext({ requiresCSFLE: true }); const testSuites = gatherTestSuites( @@ -75,11 +72,7 @@ describe('Client Side Encryption (Legacy)', function () { return testContext.setup(this.configuration); }); - before(async function () { - await filter.initializeFilter({} as any, {}); - }); - - generateTopologyTests(testSuites, testContext, test => { + generateTopologyTests(testSuites, testContext, (test, configuration) => { const { description } = test; if (SKIPPED_TESTS.has(description)) { return 'Skipped by generic test name skip filter.'; @@ -109,7 +102,7 @@ describe('Client Side Encryption (Legacy)', function () { 'Automatically encrypt and decrypt with a named KMS provider' ].includes(description) ) { - const result = filter.filter({ + const result = configuration.filters.ClientSideEncryptionFilter.filter({ metadata: { requires: { clientSideEncryption: '>=6.0.1' } } }); @@ -121,13 +114,9 @@ describe('Client Side Encryption (Legacy)', function () { }); describe('Client Side Encryption (Unified)', function () { - before(async function () { - await filter.initializeFilter({} as any, {}); - }); - runUnifiedSuite( 
loadSpecTests(path.join('client-side-encryption', 'tests', 'unified')), - ({ description }) => { + ({ description }, configuration) => { const delegatedKMIPTests = [ 'rewrap with current KMS provider', 'rewrap with new local KMS provider', @@ -154,7 +143,7 @@ describe('Client Side Encryption (Unified)', function () { 'can explicitly encrypt with a named KMS provider' ]; if (delegatedKMIPTests.includes(description)) { - const shouldSkip = filter.filter({ + const shouldSkip = configuration.filters.ClientSideEncryptionFilter.filter({ metadata: { requires: { clientSideEncryption: '>=6.0.1' } } }); if (typeof shouldSkip === 'string') return shouldSkip; diff --git a/test/integration/client-side-encryption/driver.test.ts b/test/integration/client-side-encryption/driver.test.ts index 2b0f6447f1a..8862b6ac41a 100644 --- a/test/integration/client-side-encryption/driver.test.ts +++ b/test/integration/client-side-encryption/driver.test.ts @@ -718,6 +718,7 @@ describe('CSOT', function () { keyVaultClient, keyVaultNamespace: 'keyvault.datakeys', kmsProviders: getLocalKmsProvider(), + extraOptions: getEncryptExtraOptions(), schemaMap: { 'test.test': { bsonType: 'object', @@ -771,14 +772,15 @@ describe('CSOT', function () { autoEncryption: { keyVaultClient, keyVaultNamespace: 'admin.datakeys', - kmsProviders: getLocalKmsProvider() + kmsProviders: getLocalKmsProvider(), + extraOptions: getEncryptExtraOptions() } } ); }); afterEach(async function () { - await encryptedClient.close(); + await encryptedClient?.close(); }); it('the command succeeds', metadata, async function () { @@ -1045,11 +1047,19 @@ describe('CSOT', function () { }); it('the command should fail due to a timeout error', metadata, async function () { - const { duration, result: error } = await measureDuration(() => - stateMachine - .fetchCollectionInfo(encryptedClient, 'test.test', { a: 1 }, timeoutContext()) - .catch(e => e) - ); + const { duration, result: error } = await measureDuration(async () => { + try { + const cursor = stateMachine.fetchCollectionInfo( + encryptedClient, + 'test.test', + { a: 1 }, + timeoutContext() + ); + for await (const doc of cursor) void doc; + } catch (error) { + return error; + } + }); expect(error).to.be.instanceOf(MongoOperationTimeoutError); expect(duration).to.be.within(timeoutMS - 100, timeoutMS + 100); }); @@ -1072,7 +1082,8 @@ describe('CSOT', function () { }); it('the command succeeds', metadata, async function () { - await stateMachine.fetchCollectionInfo(encryptedClient, 'test.test', { a: 1 }); + const cursor = stateMachine.fetchCollectionInfo(encryptedClient, 'test.test', { a: 1 }); + for await (const doc of cursor) void doc; }); } ); diff --git a/test/integration/client-side-operations-timeout/client_side_operations_timeout.unit.test.ts b/test/integration/client-side-operations-timeout/client_side_operations_timeout.unit.test.ts index 4ca8e32f913..5d7d3f61883 100644 --- a/test/integration/client-side-operations-timeout/client_side_operations_timeout.unit.test.ts +++ b/test/integration/client-side-operations-timeout/client_side_operations_timeout.unit.test.ts @@ -200,7 +200,8 @@ describe('CSOT spec unit tests', function () { mongocryptdSpawnArgs: [ `--pidfilepath=${new ObjectId().toHexString()}.pid`, '--port=27020' - ] + ], + cryptSharedLibSearchPaths: [] }, keyVaultNamespace: 'admin.datakeys', kmsProviders: { diff --git a/test/integration/crud/crud.prose.test.ts b/test/integration/crud/crud.prose.test.ts index 8665d69a1f3..0823dcf5d60 100644 --- a/test/integration/crud/crud.prose.test.ts +++ 
b/test/integration/crud/crud.prose.test.ts @@ -13,6 +13,7 @@ import { MongoInvalidArgumentError, MongoServerError } from '../../mongodb'; +import { getEncryptExtraOptions } from '../../tools/utils'; import { filterForCommands } from '../shared'; describe('CRUD Prose Spec Tests', () => { @@ -1023,14 +1024,15 @@ describe('CRUD Prose Spec Tests', () => { accessKeyId: 'foo', secretAccessKey: 'bar' } - } + }, + extraOptions: getEncryptExtraOptions() } } ); }); afterEach(async function () { - await client.close(); + await client?.close(); }); it('raises a client side error', async function () { diff --git a/test/integration/crud/find_and_modify.test.ts b/test/integration/crud/find_and_modify.test.ts index 16764f020ea..18ab93cf3da 100644 --- a/test/integration/crud/find_and_modify.test.ts +++ b/test/integration/crud/find_and_modify.test.ts @@ -1,6 +1,12 @@ import { expect } from 'chai'; -import { type CommandStartedEvent, MongoServerError, ObjectId } from '../../mongodb'; +import { + type Collection, + type CommandStartedEvent, + type MongoClient, + MongoServerError, + ObjectId +} from '../../mongodb'; import { setupDatabase } from '../shared'; describe('Collection (#findOneAnd...)', function () { @@ -324,6 +330,79 @@ describe('Collection (#findOneAnd...)', function () { }); }); }); + + context('when updating with an aggregation pipeline', function () { + context('when passing includeResultMetadata: true', function () { + let client: MongoClient; + let collection: Collection<{ a: number; b: number }>; + + beforeEach(async function () { + client = this.configuration.newClient({}, { maxPoolSize: 1 }); + collection = client.db('test').collection('findAndModifyTest'); + await collection.insertMany([{ a: 1, b: 1 }], { writeConcern: { w: 1 } }); + }); + + afterEach(async function () { + await collection.drop(); + await client?.close(); + }); + + it( + 'the aggregation pipeline updates the matching document', + { + requires: { + mongodb: '>4.0' + } + }, + async function () { + const { + value: { _id, ...document } + } = await collection.findOneAndUpdate( + { a: 1 }, + [{ $set: { a: { $add: [1, '$a'] } } }], + { + includeResultMetadata: true, + returnDocument: 'after' + } + ); + expect(document).to.deep.equal({ a: 2, b: 1 }); + } + ); + }); + + context('when passing includeResultMetadata: false', function () { + let client: MongoClient; + let collection: Collection<{ a: number; b: number }>; + + beforeEach(async function () { + client = this.configuration.newClient({}, { maxPoolSize: 1 }); + collection = client.db('test').collection('findAndModifyTest'); + await collection.insertMany([{ a: 1, b: 1 }], { writeConcern: { w: 1 } }); + }); + + afterEach(async function () { + await collection.drop(); + await client?.close(); + }); + + it( + 'the aggregation pipeline updates the matching document', + { + requires: { + mongodb: '>4.0' + } + }, + async function () { + const { _id, ...document } = await collection.findOneAndUpdate( + { a: 1 }, + [{ $set: { a: { $add: [1, '$a'] } } }], + { returnDocument: 'after' } + ); + expect(document).to.deep.equal({ a: 2, b: 1 }); + } + ); + }); + }); }); describe('#findOneAndReplace', function () { diff --git a/test/readme.md b/test/readme.md index 5673fa02854..e0f39cada0c 100644 --- a/test/readme.md +++ b/test/readme.md @@ -520,15 +520,7 @@ source .evergreen/setup-fle.sh > By default, `setup-fle.sh` installs crypt_shared. If you want to test with mongocryptd instead, set the RUN_WITH_MONGOCRYPTD environment variable before > sourcing `setup-fle.sh`. - -3. 
Start the KMS and KMIP servers: - -```bash -bash .evergreen/run-kms-servers.sh -bash .evergreen/run-kmip-server.sh & -``` - -4. Run the functional tests: +3. Run the functional tests: ```bash export TEST_CSFLE=true npm run check:test diff --git a/test/spec/change-streams/unified/change-streams-nsType.json b/test/spec/change-streams/unified/change-streams-nsType.json new file mode 100644 index 00000000000..1861c9a5e0a --- /dev/null +++ b/test/spec/change-streams/unified/change-streams-nsType.json @@ -0,0 +1,145 @@ +{ + "description": "change-streams-nsType", + "schemaVersion": "1.7", + "runOnRequirements": [ + { + "minServerVersion": "8.1.0", + "topologies": [ + "replicaset", + "sharded" + ], + "serverless": "forbid" + } + ], + "createEntities": [ + { + "client": { + "id": "client0", + "useMultipleMongoses": false + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "database0" + } + } + ], + "tests": [ + { + "description": "nsType is present when creating collections", + "operations": [ + { + "name": "dropCollection", + "object": "database0", + "arguments": { + "collection": "foo" + } + }, + { + "name": "createChangeStream", + "object": "database0", + "arguments": { + "pipeline": [], + "showExpandedEvents": true + }, + "saveResultAsEntity": "changeStream0" + }, + { + "name": "createCollection", + "object": "database0", + "arguments": { + "collection": "foo" + } + }, + { + "name": "iterateUntilDocumentOrError", + "object": "changeStream0", + "expectResult": { + "operationType": "create", + "nsType": "collection" + } + } + ] + }, + { + "description": "nsType is present when creating timeseries", + "operations": [ + { + "name": "dropCollection", + "object": "database0", + "arguments": { + "collection": "foo" + } + }, + { + "name": "createChangeStream", + "object": "database0", + "arguments": { + "pipeline": [], + "showExpandedEvents": true + }, + "saveResultAsEntity": "changeStream0" + }, + { + "name": "createCollection", + "object": "database0", + "arguments": { + "collection": "foo", + "timeseries": { + "timeField": "time", + "metaField": "meta", + "granularity": "minutes" + } + } + }, + { + "name": "iterateUntilDocumentOrError", + "object": "changeStream0", + "expectResult": { + "operationType": "create", + "nsType": "timeseries" + } + } + ] + }, + { + "description": "nsType is present when creating views", + "operations": [ + { + "name": "dropCollection", + "object": "database0", + "arguments": { + "collection": "foo" + } + }, + { + "name": "createChangeStream", + "object": "database0", + "arguments": { + "pipeline": [], + "showExpandedEvents": true + }, + "saveResultAsEntity": "changeStream0" + }, + { + "name": "createCollection", + "object": "database0", + "arguments": { + "collection": "foo", + "viewOn": "testName" + } + }, + { + "name": "iterateUntilDocumentOrError", + "object": "changeStream0", + "expectResult": { + "operationType": "create", + "nsType": "view" + } + } + ] + } + ] +} diff --git a/test/spec/change-streams/unified/change-streams-nsType.yml b/test/spec/change-streams/unified/change-streams-nsType.yml new file mode 100644 index 00000000000..9885c4aaf68 --- /dev/null +++ b/test/spec/change-streams/unified/change-streams-nsType.yml @@ -0,0 +1,86 @@ +description: "change-streams-nsType" +schemaVersion: "1.7" +runOnRequirements: + - minServerVersion: "8.1.0" + topologies: [ replicaset, sharded ] + serverless: forbid +createEntities: + - client: + id: &client0 client0 + useMultipleMongoses: false + - database: + id: &database0 database0 + 
client: *client0 + databaseName: *database0 + +tests: + - description: "nsType is present when creating collections" + operations: + - name: dropCollection + object: *database0 + arguments: + collection: &collection0 foo + - name: createChangeStream + object: *database0 + arguments: + pipeline: [] + showExpandedEvents: true + saveResultAsEntity: &changeStream0 changeStream0 + - name: createCollection + object: *database0 + arguments: + collection: *collection0 + - name: iterateUntilDocumentOrError + object: *changeStream0 + expectResult: + operationType: create + nsType: collection + + - description: "nsType is present when creating timeseries" + operations: + - name: dropCollection + object: *database0 + arguments: + collection: &collection0 foo + - name: createChangeStream + object: *database0 + arguments: + pipeline: [] + showExpandedEvents: true + saveResultAsEntity: &changeStream0 changeStream0 + - name: createCollection + object: *database0 + arguments: + collection: *collection0 + timeseries: + timeField: "time" + metaField: "meta" + granularity: "minutes" + - name: iterateUntilDocumentOrError + object: *changeStream0 + expectResult: + operationType: create + nsType: timeseries + + - description: "nsType is present when creating views" + operations: + - name: dropCollection + object: *database0 + arguments: + collection: &collection0 foo + - name: createChangeStream + object: *database0 + arguments: + pipeline: [] + showExpandedEvents: true + saveResultAsEntity: &changeStream0 changeStream0 + - name: createCollection + object: *database0 + arguments: + collection: *collection0 + viewOn: testName + - name: iterateUntilDocumentOrError + object: *changeStream0 + expectResult: + operationType: create + nsType: view \ No newline at end of file diff --git a/test/spec/client-side-encryption/etc/data/lookup/key-doc.json b/test/spec/client-side-encryption/etc/data/lookup/key-doc.json new file mode 100644 index 00000000000..566b56c354f --- /dev/null +++ b/test/spec/client-side-encryption/etc/data/lookup/key-doc.json @@ -0,0 +1,30 @@ +{ + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } +} diff --git a/test/spec/client-side-encryption/etc/data/lookup/schema-csfle.json b/test/spec/client-side-encryption/etc/data/lookup/schema-csfle.json new file mode 100644 index 00000000000..29ac9ad5da4 --- /dev/null +++ b/test/spec/client-side-encryption/etc/data/lookup/schema-csfle.json @@ -0,0 +1,19 @@ +{ + "properties": { + "csfle": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" +} diff --git a/test/spec/client-side-encryption/etc/data/lookup/schema-csfle2.json b/test/spec/client-side-encryption/etc/data/lookup/schema-csfle2.json new file mode 100644 index 00000000000..3f1c02781c5 --- /dev/null +++ 
b/test/spec/client-side-encryption/etc/data/lookup/schema-csfle2.json @@ -0,0 +1,19 @@ +{ + "properties": { + "csfle2": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" +} diff --git a/test/spec/client-side-encryption/etc/data/lookup/schema-qe.json b/test/spec/client-side-encryption/etc/data/lookup/schema-qe.json new file mode 100644 index 00000000000..9428ea1b458 --- /dev/null +++ b/test/spec/client-side-encryption/etc/data/lookup/schema-qe.json @@ -0,0 +1,20 @@ +{ + "escCollection": "enxcol_.qe.esc", + "ecocCollection": "enxcol_.qe.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "qe", + "bsonType": "string", + "queries": { + "queryType": "equality", + "contention": 0 + } + } + ] +} diff --git a/test/spec/client-side-encryption/etc/data/lookup/schema-qe2.json b/test/spec/client-side-encryption/etc/data/lookup/schema-qe2.json new file mode 100644 index 00000000000..77d5bd37cbb --- /dev/null +++ b/test/spec/client-side-encryption/etc/data/lookup/schema-qe2.json @@ -0,0 +1,20 @@ +{ + "escCollection": "enxcol_.qe2.esc", + "ecocCollection": "enxcol_.qe2.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "qe2", + "bsonType": "string", + "queries": { + "queryType": "equality", + "contention": 0 + } + } + ] +} diff --git a/test/spec/load-balancers/non-lb-connection-establishment.json b/test/spec/load-balancers/non-lb-connection-establishment.json index 6aaa7bdf98b..f4fed13cc23 100644 --- a/test/spec/load-balancers/non-lb-connection-establishment.json +++ b/test/spec/load-balancers/non-lb-connection-establishment.json @@ -57,6 +57,19 @@ "tests": [ { "description": "operations against non-load balanced clusters fail if URI contains loadBalanced=true", + "runOnRequirements": [ + { + "maxServerVersion": "8.0.99", + "topologies": [ + "single" + ] + }, + { + "topologies": [ + "sharded" + ] + } + ], "operations": [ { "name": "runCommand", diff --git a/test/spec/load-balancers/non-lb-connection-establishment.yml b/test/spec/load-balancers/non-lb-connection-establishment.yml index e805549ac0d..7eea3957ab4 100644 --- a/test/spec/load-balancers/non-lb-connection-establishment.yml +++ b/test/spec/load-balancers/non-lb-connection-establishment.yml @@ -42,6 +42,11 @@ tests: # If the server is not configured to be behind a load balancer and the URI contains loadBalanced=true, the driver # should error during the connection handshake because the server's hello response does not contain a serviceId field. - description: operations against non-load balanced clusters fail if URI contains loadBalanced=true + runOnRequirements: + - maxServerVersion: 8.0.99 # DRIVERS-3108: Skip test on >=8.1 mongod. SERVER-85804 changes a non-LB mongod to close connection. 
+ topologies: [ single ] + - topologies: [ sharded ] + operations: - name: runCommand object: *lbTrueDatabase diff --git a/test/tools/runner/config.ts b/test/tools/runner/config.ts index 96d1f677a66..5385ace8cc2 100644 --- a/test/tools/runner/config.ts +++ b/test/tools/runner/config.ts @@ -13,6 +13,7 @@ import { type WriteConcernSettings } from '../../mongodb'; import { getEnvironmentalOptions } from '../utils'; +import { type Filter } from './filters/filter'; interface ProxyParams { proxyHost?: string; @@ -85,6 +86,7 @@ export class TestConfiguration { serverApi?: ServerApi; activeResources: number; isSrv: boolean; + filters: Record; constructor( private uri: string, @@ -129,6 +131,11 @@ export class TestConfiguration { password: url.password }; } + + this.filters = Object.fromEntries( + context.filters.map(filter => [filter.constructor.name, filter]) + ); + if (context.serverlessCredentials) { const { username, password } = context.serverlessCredentials; this.options.auth = { username, password, authSource: 'admin' }; diff --git a/test/tools/runner/filters/client_encryption_filter.ts b/test/tools/runner/filters/client_encryption_filter.ts index 79983816d77..3bb66df72fa 100644 --- a/test/tools/runner/filters/client_encryption_filter.ts +++ b/test/tools/runner/filters/client_encryption_filter.ts @@ -4,9 +4,44 @@ import * as process from 'process'; import { satisfies } from 'semver'; import { kmsCredentialsPresent } from '../../../csfle-kms-providers'; -import { type MongoClient } from '../../../mongodb'; +import { type AutoEncrypter, MongoClient } from '../../../mongodb'; import { Filter } from './filter'; +function getCryptSharedVersion(): AutoEncrypter['cryptSharedLibVersionInfo'] | null { + try { + const mc = new MongoClient('mongodb://localhost:27017', { + autoEncryption: { + kmsProviders: { + local: { + key: Buffer.alloc(96) + } + }, + extraOptions: { + cryptSharedLibPath: process.env.CRYPT_SHARED_LIB_PATH + } + } + }); + return mc.autoEncrypter.cryptSharedLibVersionInfo; + } catch { + try { + const mc = new MongoClient('mongodb://localhost:27017', { + autoEncryption: { + kmsProviders: { + local: { + key: Buffer.alloc(96) + } + } + } + }); + return mc.autoEncrypter.cryptSharedLibVersionInfo; + } catch { + // squash errors + } + } + + return null; +} + /** * Filter for whether or not a test needs / doesn't need Client Side Encryption * @@ -24,15 +59,18 @@ export class ClientSideEncryptionFilter extends Filter { enabled: boolean; static version = null; static libmongocrypt: string | null = null; + static cryptShared: AutoEncrypter['cryptSharedLibVersionInfo'] | null = null; override async initializeFilter(client: MongoClient, context: Record) { - let mongodbClientEncryption; + let mongodbClientEncryption: typeof import('mongodb-client-encryption'); try { // eslint-disable-next-line @typescript-eslint/no-require-imports mongodbClientEncryption = require('mongodb-client-encryption'); ClientSideEncryptionFilter.libmongocrypt = ( mongodbClientEncryption as typeof import('mongodb-client-encryption') ).MongoCrypt.libmongocryptVersion; + + ClientSideEncryptionFilter.cryptShared = getCryptSharedVersion(); } catch (failedToGetFLELib) { if (process.env.TEST_CSFLE) { console.error({ failedToGetFLELib }); @@ -53,7 +91,8 @@ export class ClientSideEncryptionFilter extends Filter { enabled: this.enabled, mongodbClientEncryption, version: ClientSideEncryptionFilter.version, - libmongocrypt: ClientSideEncryptionFilter.libmongocrypt + libmongocrypt: ClientSideEncryptionFilter.libmongocrypt, + 
cryptShared: ClientSideEncryptionFilter.cryptShared }; } diff --git a/test/tools/runner/filters/filter.ts b/test/tools/runner/filters/filter.ts index b03ad83d5e9..6251cf44c8c 100644 --- a/test/tools/runner/filters/filter.ts +++ b/test/tools/runner/filters/filter.ts @@ -1,5 +1,3 @@ -import { type Test } from 'mocha'; - import { type MongoClient } from '../../../mongodb'; export abstract class Filter { @@ -7,5 +5,5 @@ export abstract class Filter { return; } - abstract filter(test: Test): string | boolean; + abstract filter(test: { metadata?: MongoDBMetadataUI }): string | boolean; } diff --git a/test/tools/runner/hooks/configuration.ts b/test/tools/runner/hooks/configuration.ts index 1af5e7a6ee6..d6c4100f339 100644 --- a/test/tools/runner/hooks/configuration.ts +++ b/test/tools/runner/hooks/configuration.ts @@ -51,20 +51,22 @@ async function initializeFilters(client): Promise> { return {}; } initializedFilters = true; - const context = {}; - - for (const filter of [ - new ApiVersionFilter(), - new AuthFilter(), - new ClientSideEncryptionFilter(), - new GenericPredicateFilter(), - new IDMSMockServerFilter(), - new MongoDBTopologyFilter(), - new MongoDBVersionFilter(), - new NodeVersionFilter(), - new OSFilter(), - new ServerlessFilter() - ]) { + const context = { + filters: [ + new ApiVersionFilter(), + new AuthFilter(), + new ClientSideEncryptionFilter(), + new GenericPredicateFilter(), + new IDMSMockServerFilter(), + new MongoDBTopologyFilter(), + new MongoDBVersionFilter(), + new NodeVersionFilter(), + new OSFilter(), + new ServerlessFilter() + ] + }; + + for (const filter of context.filters) { filters.push(filter); await filter.initializeFilter(client, context); } @@ -176,9 +178,7 @@ const testConfigBeforeHook = async function () { auth: process.env.AUTH === 'auth', tls: process.env.SSL === 'ssl', csfle: { - enabled: this.configuration.clientSideEncryption.enabled, - version: this.configuration.clientSideEncryption.version, - libmongocrypt: this.configuration.clientSideEncryption.libmongocrypt + ...this.configuration.clientSideEncryption }, serverApi: MONGODB_API_VERSION, atlas: process.env.ATLAS_CONNECTIVITY != null, diff --git a/test/tools/spec-runner/index.js b/test/tools/spec-runner/index.js index f312990137c..62ee50ff71e 100644 --- a/test/tools/spec-runner/index.js +++ b/test/tools/spec-runner/index.js @@ -15,7 +15,6 @@ const { HEARTBEAT_EVENTS } = require('../../mongodb'); const { isAnyRequirementSatisfied } = require('../unified-spec-runner/unified-utils'); -const { ClientSideEncryptionFilter } = require('../runner/filters/client_encryption_filter'); const { getCSFLEKMSProviders } = require('../../csfle-kms-providers'); // Promise.try alternative https://stackoverflow.com/questions/60624081/promise-try-without-bluebird/60624164?noredirect=1#comment107255389_60624164 @@ -153,7 +152,7 @@ function legacyRunOnToRunOnRequirement(runOn) { } /** - * @param {((test: { description: string }) => true | string)?} filter a function that returns true for any tests that should run, false otherwise. + * @param {((test: { description: string }, configuration: TestConfiguration) => true | string)?} filter a function that returns true for any tests that should run, false otherwise. 
*/ function generateTopologyTests(testSuites, testContext, filter) { for (const testSuite of testSuites) { @@ -198,10 +197,8 @@ function generateTopologyTests(testSuites, testContext, filter) { let csfleFilterError = null; if (shouldRun && testContext.requiresCSFLE) { - const csfleFilter = new ClientSideEncryptionFilter(); - await csfleFilter.initializeFilter(null, {}); try { - const filterResult = csfleFilter.filter({ + const filterResult = this.configuration.filters.ClientSideEncryptionFilter.filter({ metadata: { requires: { clientSideEncryption: true } } }); if (typeof filterResult === 'string') { diff --git a/test/types/community/collection/findX.test-d.ts b/test/types/community/collection/findX.test-d.ts index 8720c949400..0de99ee7d49 100644 --- a/test/types/community/collection/findX.test-d.ts +++ b/test/types/community/collection/findX.test-d.ts @@ -388,3 +388,14 @@ expectType | null>( } ) ); + +// the update operator can be an aggregation pipeline +expectType | null>( + await coll.findOneAndUpdate({ a: 3 }, [ + { + $set: { + a: 5 + } + } + ]) +); diff --git a/test/unit/client-side-encryption/auto_encrypter.test.ts b/test/unit/client-side-encryption/auto_encrypter.test.ts index 79bc321b802..816b3a6cb93 100644 --- a/test/unit/client-side-encryption/auto_encrypter.test.ts +++ b/test/unit/client-side-encryption/auto_encrypter.test.ts @@ -63,7 +63,10 @@ describe('AutoEncrypter', function () { return Promise.resolve(); }); - sandbox.stub(StateMachine.prototype, 'fetchCollectionInfo').resolves(MOCK_COLLINFO_RESPONSE); + const iterator = (async function* () { + yield BSON.deserialize(MOCK_COLLINFO_RESPONSE); + })(); + sandbox.stub(StateMachine.prototype, 'fetchCollectionInfo').returns(iterator); sandbox.stub(StateMachine.prototype, 'markCommand').callsFake(() => { if (ENABLE_LOG_TEST) { diff --git a/test/unit/client-side-encryption/state_machine.test.ts b/test/unit/client-side-encryption/state_machine.test.ts index 3d6a92765a8..1f43b57007b 100644 --- a/test/unit/client-side-encryption/state_machine.test.ts +++ b/test/unit/client-side-encryption/state_machine.test.ts @@ -580,11 +580,21 @@ describe('StateMachine', function () { serverSelectionTimeoutMS: 30000 }); await sleep(300); - await stateMachine - .fetchCollectionInfo(client, 'keyVault', BSON.serialize({ a: 1 }), { - timeoutContext: context - }) - .catch(e => squashError(e)); + + try { + const cursor = stateMachine.fetchCollectionInfo( + client, + 'keyVault', + BSON.serialize({ a: 1 }), + { + timeoutContext: context + } + ); + for await (const doc of cursor) void doc; + } catch { + // ignore + } + const [_filter, { timeoutContext }] = listCollectionsSpy.getCalls()[0].args; expect(timeoutContext).to.exist; expect(timeoutContext.timeoutContext).to.equal(context); @@ -596,9 +606,16 @@ describe('StateMachine', function () { 'when StateMachine.fetchCollectionInfo() is not passed a `CSOTimeoutContext`', function () { it('no timeoutContext is provided to listCollections', async function () { - await stateMachine - .fetchCollectionInfo(client, 'keyVault', BSON.serialize({ a: 1 })) - .catch(e => squashError(e)); + try { + const cursor = stateMachine.fetchCollectionInfo( + client, + 'keyVault', + BSON.serialize({ a: 1 }) + ); + for await (const doc of cursor) void doc; + } catch { + // ignore + } const [_filter, { timeoutContext }] = listCollectionsSpy.getCalls()[0].args; expect(timeoutContext).not.to.exist; });
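The `state_machine.test.ts` and `driver.test.ts` hunks above move `StateMachine.fetchCollectionInfo()` from resolving a single promise to returning an async-iterable cursor that callers drain with `for await`. A minimal TypeScript sketch of that consumption pattern follows; `fakeCollectionInfoCursor` and `collectCollectionInfo` are hypothetical stand-ins for illustration only, not driver API.

```ts
// Hypothetical stand-in for the async-iterable cursor now returned by
// StateMachine.fetchCollectionInfo(); the real cursor lives inside the driver.
async function* fakeCollectionInfoCursor(): AsyncGenerator<{ name: string }> {
  yield { name: 'test.test' };
}

// Callers drain the cursor with for-await instead of awaiting one promise,
// collecting documents (or bailing out early) as they iterate.
async function collectCollectionInfo(): Promise<Array<{ name: string }>> {
  const docs: Array<{ name: string }> = [];
  for await (const doc of fakeCollectionInfoCursor()) {
    docs.push(doc);
  }
  return docs;
}

void collectCollectionInfo().then(docs => console.log(docs.length)); // prints 1
```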