diff --git a/.gitignore b/.gitignore index f04e671f84f..d0fdc406efd 100644 --- a/.gitignore +++ b/.gitignore @@ -99,6 +99,7 @@ test/lambda/env.json # files generated by tooling in drivers-evergreen-tools secrets-export.sh +secrets-export.fish mo-expansion.sh mo-expansion.yml expansions.sh diff --git a/etc/bash_to_fish.mjs b/etc/bash_to_fish.mjs new file mode 100644 index 00000000000..09cfe054110 --- /dev/null +++ b/etc/bash_to_fish.mjs @@ -0,0 +1,39 @@ +import { createReadStream, promises as fs } from 'node:fs'; +import path from 'node:path'; +import readline from 'node:readline/promises'; + +/** + * Takes an "exports" only bash script file + * and converts it to fish syntax. + * Will crash on any line that isn't: + * - a comment + * - an empty line + * - a bash 'set' call + * - export VAR=VAL + */ + +const fileName = process.argv[2]; +const outFileName = path.basename(fileName, '.sh') + '.fish'; +const input = createReadStream(process.argv[2]); +const lines = readline.createInterface({ input }); +const output = await fs.open(outFileName, 'w'); + +for await (let line of lines) { + line = line.trim(); + + if (!line.startsWith('export ')) { + if (line.startsWith('#')) continue; + if (line === '') continue; + if (line.startsWith('set')) continue; + throw new Error('Cannot translate: ' + line); + } + + const varVal = line.slice('export '.length); + const variable = varVal.slice(0, varVal.indexOf('=')); + const value = varVal.slice(varVal.indexOf('=') + 1); + await output.appendFile(`set -x ${variable} ${value}\n`); +} + +output.close(); +input.close(); +lines.close(); diff --git a/package-lock.json b/package-lock.json index ea9ce112329..fa56b4aa307 100644 --- a/package-lock.json +++ b/package-lock.json @@ -48,7 +48,7 @@ "js-yaml": "^4.1.0", "mocha": "^10.8.2", "mocha-sinon": "^2.1.2", - "mongodb-client-encryption": "^6.2.0", + "mongodb-client-encryption": "^6.3.0", "mongodb-legacy": "^6.1.3", "nyc": "^15.1.0", "prettier": "^3.4.2", @@ -6693,9 +6693,9 @@ } }, "node_modules/mongodb-client-encryption": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/mongodb-client-encryption/-/mongodb-client-encryption-6.2.0.tgz", - "integrity": "sha512-jfOCthPH0jxd9RJCerNbf1aRAcUJFwiWikJ2j9oBPRc+Oets3aKUriyZe4n16sF3Ibc1xar1zNInAfHEcVtYRg==", + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/mongodb-client-encryption/-/mongodb-client-encryption-6.3.0.tgz", + "integrity": "sha512-OaOg02vglPxxrfY01alC0ER0W4WMuNO2ZJR3ehAUcuGYreJaJ+aX+rUQiQkdQHiXvnVPDUx/4QDr2CR1/FvpcQ==", "dev": true, "hasInstallScript": true, "license": "Apache-2.0", diff --git a/package.json b/package.json index fb7f68574b4..1e5a8b1f394 100644 --- a/package.json +++ b/package.json @@ -96,7 +96,7 @@ "js-yaml": "^4.1.0", "mocha": "^10.8.2", "mocha-sinon": "^2.1.2", - "mongodb-client-encryption": "^6.2.0", + "mongodb-client-encryption": "^6.3.0", "mongodb-legacy": "^6.1.3", "nyc": "^15.1.0", "prettier": "^3.4.2", diff --git a/src/client-side-encryption/auto_encrypter.ts b/src/client-side-encryption/auto_encrypter.ts index 1d7a9de4c66..a24f8cd6da6 100644 --- a/src/client-side-encryption/auto_encrypter.ts +++ b/src/client-side-encryption/auto_encrypter.ts @@ -239,6 +239,7 @@ export class AutoEncrypter { this._kmsProviders = options.kmsProviders || {}; const mongoCryptOptions: MongoCryptOptions = { + enableMultipleCollinfo: true, cryptoCallbacks }; if (options.schemaMap) { diff --git a/src/client-side-encryption/state_machine.ts b/src/client-side-encryption/state_machine.ts index 096c4cfc635..c45fdf1f093 100644 --- 
a/src/client-side-encryption/state_machine.ts +++ b/src/client-side-encryption/state_machine.ts @@ -16,6 +16,7 @@ import { getSocks, type SocksLib } from '../deps'; import { MongoOperationTimeoutError } from '../error'; import { type MongoClient, type MongoClientOptions } from '../mongo_client'; import { type Abortable } from '../mongo_types'; +import { type CollectionInfo } from '../operations/list_collections'; import { Timeout, type TimeoutContext, TimeoutError } from '../timeout'; import { addAbortListener, @@ -205,11 +206,19 @@ export class StateMachine { const mongocryptdManager = executor._mongocryptdManager; let result: Uint8Array | null = null; - while (context.state !== MONGOCRYPT_CTX_DONE && context.state !== MONGOCRYPT_CTX_ERROR) { + // Typescript treats getters just like properties: Once you've tested it for equality + // it cannot change. Which is exactly the opposite of what we use state and status for. + // Every call to at least `addMongoOperationResponse` and `finalize` can change the state. + // These wrappers let us write code more naturally and not add compiler exceptions + // to conditions checks inside the state machine. + const getStatus = () => context.status; + const getState = () => context.state; + + while (getState() !== MONGOCRYPT_CTX_DONE && getState() !== MONGOCRYPT_CTX_ERROR) { options.signal?.throwIfAborted(); - debug(`[context#${context.id}] ${stateToString.get(context.state) || context.state}`); + debug(`[context#${context.id}] ${stateToString.get(getState()) || getState()}`); - switch (context.state) { + switch (getState()) { case MONGOCRYPT_CTX_NEED_MONGO_COLLINFO: { const filter = deserialize(context.nextMongoOperation()); if (!metaDataClient) { @@ -218,22 +227,28 @@ export class StateMachine { ); } - const collInfo = await this.fetchCollectionInfo( + const collInfoCursor = this.fetchCollectionInfo( metaDataClient, context.ns, filter, options ); - if (collInfo) { - context.addMongoOperationResponse(collInfo); + + for await (const collInfo of collInfoCursor) { + context.addMongoOperationResponse(serialize(collInfo)); + if (getState() === MONGOCRYPT_CTX_ERROR) break; } + if (getState() === MONGOCRYPT_CTX_ERROR) break; + context.finishMongoOperation(); break; } case MONGOCRYPT_CTX_NEED_MONGO_MARKINGS: { const command = context.nextMongoOperation(); + if (getState() === MONGOCRYPT_CTX_ERROR) break; + if (!mongocryptdClient) { throw new MongoCryptError( 'unreachable state machine state: entered MONGOCRYPT_CTX_NEED_MONGO_MARKINGS but mongocryptdClient is undefined' @@ -283,9 +298,8 @@ export class StateMachine { case MONGOCRYPT_CTX_READY: { const finalizedContext = context.finalize(); - // @ts-expect-error finalize can change the state, check for error - if (context.state === MONGOCRYPT_CTX_ERROR) { - const message = context.status.message || 'Finalization error'; + if (getState() === MONGOCRYPT_CTX_ERROR) { + const message = getStatus().message || 'Finalization error'; throw new MongoCryptError(message); } result = finalizedContext; @@ -293,12 +307,12 @@ export class StateMachine { } default: - throw new MongoCryptError(`Unknown state: ${context.state}`); + throw new MongoCryptError(`Unknown state: ${getState()}`); } } - if (context.state === MONGOCRYPT_CTX_ERROR || result == null) { - const message = context.status.message; + if (getState() === MONGOCRYPT_CTX_ERROR || result == null) { + const message = getStatus().message; if (!message) { debug( `unidentifiable error in MongoCrypt - received an error status from \`libmongocrypt\` but received no error 
message.` @@ -527,12 +541,12 @@ export class StateMachine { * @param filter - A filter for the listCollections command * @param callback - Invoked with the info of the requested collection, or with an error */ - async fetchCollectionInfo( + fetchCollectionInfo( client: MongoClient, ns: string, filter: Document, options?: { timeoutContext?: TimeoutContext } & Abortable - ): Promise<Uint8Array | null> { + ): AsyncIterable<CollectionInfo> { const { db } = MongoDBCollectionNamespace.fromString(ns); const cursor = client.db(db).listCollections(filter, { @@ -540,16 +554,11 @@ export class StateMachine { promoteValues: false, timeoutContext: options?.timeoutContext && new CursorTimeoutContext(options?.timeoutContext, Symbol()), - signal: options?.signal + signal: options?.signal, + nameOnly: false }); - // There is always exactly zero or one matching documents, so this should always exhaust the cursor - // in a single batch. We call `toArray()` just to be safe and ensure that the cursor is always - // exhausted and closed. - const collections = await cursor.toArray(); - - const info = collections.length > 0 ? serialize(collections[0]) : null; - return info; + return cursor; } /** diff --git a/test/integration/client-side-encryption/client_side_encryption.prose.25.lookup.test.ts b/test/integration/client-side-encryption/client_side_encryption.prose.25.lookup.test.ts new file mode 100644 index 00000000000..d4365ac6d6d --- /dev/null +++ b/test/integration/client-side-encryption/client_side_encryption.prose.25.lookup.test.ts @@ -0,0 +1,432 @@ +import * as fs from 'node:fs/promises'; +import * as path from 'node:path'; + +import { expect } from 'chai'; +import { type MongoCryptOptions } from 'mongodb-client-encryption'; +import * as sinon from 'sinon'; + +import { getCSFLEKMSProviders } from '../../csfle-kms-providers'; +import { AutoEncrypter, BSON, type Document, type MongoClient } from '../../mongodb'; +import { type TestConfiguration } from '../../tools/runner/config'; +import { getEncryptExtraOptions } from '../../tools/utils'; + +const defaultMetadata: MongoDBMetadataUI = { + requires: { + topology: '!single', + clientSideEncryption: '>=6.3.0', + mongodb: '>=7.0.0' + } +}; + +const readFixture = async (name: string) => + BSON.EJSON.parse( + await fs.readFile( + path.resolve(__dirname, `../../spec/client-side-encryption/etc/data/lookup/${name}`), + 'utf8' + ) + ); + +const newEncryptedClient = ({ configuration }: { configuration: TestConfiguration }) => + configuration.newClient( + {}, + { + writeConcern: { w: 'majority' }, + autoEncryption: { + keyVaultNamespace: 'db.keyvault', + kmsProviders: { local: getCSFLEKMSProviders().local }, + extraOptions: getEncryptExtraOptions() + } + } + ); + +describe('$lookup support', defaultMetadata, function () { + before(async function () { + const mochaTest = { metadata: defaultMetadata }; + + if (!this.configuration.filters.MongoDBVersionFilter.filter(mochaTest)) { + return; + } + + if (!this.configuration.filters.MongoDBTopologyFilter.filter(mochaTest)) { + return; + } + + if (!this.configuration.filters.ClientSideEncryptionFilter.filter(mochaTest)) { + return; + } + + let unencryptedClient: MongoClient, encryptedClient: MongoClient; + try { + /** + * Create an encrypted MongoClient configured with: + * + * ```txt + * AutoEncryptionOpts( + * keyVaultNamespace="db.keyvault", + * kmsProviders={"local": { "key": "" }} + * ) + * ``` + */ + encryptedClient = newEncryptedClient(this); + + /** Drop database db.
*/ + await encryptedClient.db('db').dropDatabase(); + + /** Insert `key-doc.json` into db.keyvault. */ + const keyDoc = await readFixture('key-doc.json'); + await encryptedClient.db('db').collection('keyvault').insertOne(keyDoc); + + /** + * Create the following collections: + * ``` + * db.csfle with options: { "validator": { "$jsonSchema": "<schema-csfle.json>"}}. + * db.csfle2 with options: { "validator": { "$jsonSchema": "<schema-csfle2.json>"}}. + * db.qe with options: { "encryptedFields": "<schema-qe.json>"}. + * db.qe2 with options: { "encryptedFields": "<schema-qe2.json>"}. + * db.no_schema with no options. + * db.no_schema2 with no options. + * ``` + */ + const collections = [ + { + name: 'csfle', + options: { validator: { $jsonSchema: await readFixture('schema-csfle.json') } }, + document: { csfle: 'csfle' } + }, + { + name: 'csfle2', + options: { validator: { $jsonSchema: await readFixture('schema-csfle2.json') } }, + document: { csfle2: 'csfle2' } + }, + { + name: 'qe', + options: { encryptedFields: await readFixture('schema-qe.json') }, + document: { qe: 'qe' } + }, + { + name: 'qe2', + options: { encryptedFields: await readFixture('schema-qe2.json') }, + document: { qe2: 'qe2' } + }, + { + name: 'no_schema', + options: {}, + document: { no_schema: 'no_schema' } + }, + { + name: 'no_schema2', + options: {}, + document: { no_schema2: 'no_schema2' } + } + ]; + + for (const { name, options } of collections) { + await encryptedClient.db('db').createCollection(name, options); + } + + /** Create an unencrypted MongoClient. */ + unencryptedClient = this.configuration.newClient({}, { writeConcern: { w: 'majority' } }); + + /** + * ``` + * {"csfle": "csfle"} into db.csfle + * Use the unencrypted client to retrieve it. Assert the csfle field is BSON binary. + * {"csfle2": "csfle2"} into db.csfle2 + * Use the unencrypted client to retrieve it. Assert the csfle2 field is BSON binary. + * {"qe": "qe"} into db.qe + * Use the unencrypted client to retrieve it. Assert the qe field is BSON binary. + * {"qe2": "qe2"} into db.qe2 + * Use the unencrypted client to retrieve it. Assert the qe2 field is BSON binary. + * {"no_schema": "no_schema"} into db.no_schema + * {"no_schema2": "no_schema2"} into db.no_schema2 + * ``` + */ + for (const { name, document } of collections) { + const { insertedId } = await encryptedClient.db('db').collection(name).insertOne(document); + + if (name.startsWith('no_')) continue; + + expect(await unencryptedClient.db('db').collection(name).findOne(insertedId)) + .to.have.property(Object.keys(document)[0]) + .that.has.property('_bsontype', 'Binary'); + } + } finally { + await unencryptedClient?.close(); + await encryptedClient?.close(); + } + }); + + const test = function ( + title: string, + collName: string, + pipeline: Document[], + expected: Document | RegExp, + metadata?: MongoDBMetadataUI + ) { + describe(title.slice(0, title.indexOf(':')), function () { + let client: MongoClient; + + beforeEach(async function () { + client = newEncryptedClient(this); + }); + + afterEach(async function () { + await client.close(); + }); + + it(title.slice(title.indexOf(':') + 1).trim(), metadata ??
defaultMetadata, async () => { + const collection = client.db('db').collection(collName); + const actual = await collection + .aggregate(pipeline) + .toArray() + .catch(error => error); + + const expectedError = expected instanceof RegExp; + + if (expectedError) { + expect(actual).to.be.instanceOf(Error); + if (!expected.test(actual.message)) { + throw actual; + } + } else if (actual instanceof Error) { + throw actual; + } else { + expect(actual).to.have.lengthOf(1); + expect(actual[0]).to.deep.equal(expected); + } + }); + }); + }; + + test( + 'Case 1: db.csfle joins db.no_schema', + 'csfle', + [ + { $match: { csfle: 'csfle' } }, + { + $lookup: { + from: 'no_schema', + as: 'matched', + pipeline: [{ $match: { no_schema: 'no_schema' } }, { $project: { _id: 0 } }] + } + }, + { $project: { _id: 0 } } + ], + { csfle: 'csfle', matched: [{ no_schema: 'no_schema' }] }, + { requires: { ...defaultMetadata.requires, mongodb: '>=8.1.0' } } + ); + + test( + 'Case 2: db.qe joins db.no_schema', + 'qe', + [ + { $match: { qe: 'qe' } }, + { + $lookup: { + from: 'no_schema', + as: 'matched', + pipeline: [ + { $match: { no_schema: 'no_schema' } }, + { $project: { _id: 0, __safeContent__: 0 } } + ] + } + }, + { $project: { _id: 0, __safeContent__: 0 } } + ], + { qe: 'qe', matched: [{ no_schema: 'no_schema' }] }, + { requires: { ...defaultMetadata.requires, mongodb: '>=8.1.0' } } + ); + + test( + 'Case 3: db.no_schema joins db.csfle', + 'no_schema', + [ + { $match: { no_schema: 'no_schema' } }, + { + $lookup: { + from: 'csfle', + as: 'matched', + pipeline: [{ $match: { csfle: 'csfle' } }, { $project: { _id: 0 } }] + } + }, + { $project: { _id: 0 } } + ], + { no_schema: 'no_schema', matched: [{ csfle: 'csfle' }] }, + { requires: { ...defaultMetadata.requires, mongodb: '>=8.1.0' } } + ); + + test( + 'Case 4: db.no_schema joins db.qe', + 'no_schema', + [ + { $match: { no_schema: 'no_schema' } }, + { + $lookup: { + from: 'qe', + as: 'matched', + pipeline: [{ $match: { qe: 'qe' } }, { $project: { _id: 0, __safeContent__: 0 } }] + } + }, + { $project: { _id: 0 } } + ], + { no_schema: 'no_schema', matched: [{ qe: 'qe' }] }, + { requires: { ...defaultMetadata.requires, mongodb: '>=8.1.0' } } + ); + + test( + 'Case 5: db.csfle joins db.csfle2', + 'csfle', + [ + { $match: { csfle: 'csfle' } }, + { + $lookup: { + from: 'csfle2', + as: 'matched', + pipeline: [{ $match: { csfle2: 'csfle2' } }, { $project: { _id: 0 } }] + } + }, + { $project: { _id: 0 } } + ], + { csfle: 'csfle', matched: [{ csfle2: 'csfle2' }] }, + { requires: { ...defaultMetadata.requires, mongodb: '>=8.1.0' } } + ); + + test( + 'Case 6: db.qe joins db.qe2', + 'qe', + [ + { $match: { qe: 'qe' } }, + { + $lookup: { + from: 'qe2', + as: 'matched', + pipeline: [{ $match: { qe2: 'qe2' } }, { $project: { _id: 0, __safeContent__: 0 } }] + } + }, + { $project: { _id: 0, __safeContent__: 0 } } + ], + { qe: 'qe', matched: [{ qe2: 'qe2' }] }, + { requires: { ...defaultMetadata.requires, mongodb: '>=8.1.0' } } + ); + + test( + 'Case 7: db.no_schema joins db.no_schema2', + 'no_schema', + [ + { $match: { no_schema: 'no_schema' } }, + { + $lookup: { + from: 'no_schema2', + as: 'matched', + pipeline: [{ $match: { no_schema2: 'no_schema2' } }, { $project: { _id: 0 } }] + } + }, + { $project: { _id: 0 } } + ], + { no_schema: 'no_schema', matched: [{ no_schema2: 'no_schema2' }] }, + { requires: { ...defaultMetadata.requires, mongodb: '>=8.1.0' } } + ); + + test( + 'Case 8: db.csfle joins db.qe', + 'csfle', + [ + { $match: { csfle: 'qe' } }, + { + $lookup: { + from: 'qe', + 
as: 'matched', + pipeline: [{ $match: { qe: 'qe' } }, { $project: { _id: 0 } }] + } + }, + { $project: { _id: 0 } } + ], + /not supported/i, + { requires: { ...defaultMetadata.requires, mongodb: '>=8.1.0' } } + ); + + test( + 'Case 9: test error with <8.1', + 'csfle', + [ + { $match: { csfle: 'csfle' } }, + { + $lookup: { + from: 'no_schema', + as: 'matched', + pipeline: [{ $match: { no_schema: 'no_schema' } }, { $project: { _id: 0 } }] + } + }, + { $project: { _id: 0 } } + ], + /Upgrade/i, + { requires: { ...defaultMetadata.requires, mongodb: '>=7.0.0 <8.1.0' } } + ); + + describe('Node.js custom test', function () { + describe('when enableMultipleCollinfo is off and a $lookup is run', function () { + let client: MongoClient; + + beforeEach(async function () { + const mochaTest = { metadata: defaultMetadata }; + + if (!this.configuration.filters.MongoDBVersionFilter.filter(mochaTest)) { + return; + } + + if (!this.configuration.filters.MongoDBTopologyFilter.filter(mochaTest)) { + return; + } + + if (!this.configuration.filters.ClientSideEncryptionFilter.filter(mochaTest)) { + return; + } + + const getMongoCrypt = sinon.stub(AutoEncrypter, 'getMongoCrypt').callsFake(function () { + const MongoCrypt = getMongoCrypt.wrappedMethod.call(this); + return class extends MongoCrypt { + constructor(options: MongoCryptOptions) { + expect(options).to.have.property('enableMultipleCollinfo', true); // assert invariant + options.enableMultipleCollinfo = false; + super(options); + } + }; + }); + + client = newEncryptedClient(this); + }); + + afterEach(async function () { + sinon.restore(); + await client?.close(); + }); + + it( + 'throws a TypeError about libmongocrypt not enabled to support multiple collections', + defaultMetadata, + async () => { + const collection = client.db('db').collection('csfle'); + const actual = await collection + .aggregate([ + { $match: { csfle: 'csfle' } }, + { + $lookup: { + from: 'csfle2', + as: 'matched', + pipeline: [{ $match: { csfle2: 'csfle2' } }, { $project: { _id: 0 } }] + } + }, + { $project: { _id: 0 } } + ]) + .toArray() + .catch(error => error); + + expect(actual).to.be.instanceOf(TypeError); + expect(actual.message).to.match( + /libmongocrypt is not configured to support encrypting a command with multiple collections/i + ); + } + ); + }); + }); +}); diff --git a/test/integration/client-side-encryption/client_side_encryption.prose.test.js b/test/integration/client-side-encryption/client_side_encryption.prose.test.js index afbb83b7a8a..0e773654ec7 100644 --- a/test/integration/client-side-encryption/client_side_encryption.prose.test.js +++ b/test/integration/client-side-encryption/client_side_encryption.prose.test.js @@ -16,9 +16,6 @@ const { } = require('../../spec/client-side-encryption/external/external-schema.json'); /* eslint-disable no-restricted-modules */ const { ClientEncryption } = require('../../../src/client-side-encryption/client_encryption'); -const { - ClientSideEncryptionFilter -} = require('../../tools/runner/filters/client_encryption_filter'); const { getCSFLEKMSProviders } = require('../../csfle-kms-providers'); const { AlpineTestConfiguration } = require('../../tools/runner/config'); @@ -1703,9 +1700,7 @@ describe('Client Side Encryption Prose Tests', metadata, function () { context('Case 6: named KMS providers apply TLS options', function () { afterEach(() => keyvaultClient?.close()); beforeEach(async function () { - const filter = new ClientSideEncryptionFilter(); - await filter.initializeFilter({}, {}); - const shouldSkip = filter.filter({ + 
const shouldSkip = this.configuration.filters.ClientSideEncryptionFilter.filter({ metadata: { requires: { // 6.0.1 includes libmongocrypt 1.10. diff --git a/test/integration/client-side-encryption/client_side_encryption.spec.test.ts b/test/integration/client-side-encryption/client_side_encryption.spec.test.ts index 36b20f4460d..58fe5bb19bc 100644 --- a/test/integration/client-side-encryption/client_side_encryption.spec.test.ts +++ b/test/integration/client-side-encryption/client_side_encryption.spec.test.ts @@ -1,7 +1,6 @@ import * as path from 'path'; import { loadSpecTests } from '../../spec'; -import { ClientSideEncryptionFilter } from '../../tools/runner/filters/client_encryption_filter'; import { gatherTestSuites, generateTopologyTests, @@ -60,8 +59,6 @@ const SKIPPED_TESTS = new Set([ const isServerless = !!process.env.SERVERLESS; -const filter = new ClientSideEncryptionFilter(); - describe('Client Side Encryption (Legacy)', function () { const testContext = new TestRunnerContext({ requiresCSFLE: true }); const testSuites = gatherTestSuites( @@ -75,11 +72,7 @@ describe('Client Side Encryption (Legacy)', function () { return testContext.setup(this.configuration); }); - before(async function () { - await filter.initializeFilter({} as any, {}); - }); - - generateTopologyTests(testSuites, testContext, test => { + generateTopologyTests(testSuites, testContext, (test, configuration) => { const { description } = test; if (SKIPPED_TESTS.has(description)) { return 'Skipped by generic test name skip filter.'; @@ -109,7 +102,7 @@ describe('Client Side Encryption (Legacy)', function () { 'Automatically encrypt and decrypt with a named KMS provider' ].includes(description) ) { - const result = filter.filter({ + const result = configuration.filters.ClientSideEncryptionFilter.filter({ metadata: { requires: { clientSideEncryption: '>=6.0.1' } } }); @@ -121,13 +114,9 @@ describe('Client Side Encryption (Legacy)', function () { }); describe('Client Side Encryption (Unified)', function () { - before(async function () { - await filter.initializeFilter({} as any, {}); - }); - runUnifiedSuite( loadSpecTests(path.join('client-side-encryption', 'tests', 'unified')), - ({ description }) => { + ({ description }, configuration) => { const delegatedKMIPTests = [ 'rewrap with current KMS provider', 'rewrap with new local KMS provider', @@ -154,7 +143,7 @@ describe('Client Side Encryption (Unified)', function () { 'can explicitly encrypt with a named KMS provider' ]; if (delegatedKMIPTests.includes(description)) { - const shouldSkip = filter.filter({ + const shouldSkip = configuration.filters.ClientSideEncryptionFilter.filter({ metadata: { requires: { clientSideEncryption: '>=6.0.1' } } }); if (typeof shouldSkip === 'string') return shouldSkip; diff --git a/test/integration/client-side-encryption/driver.test.ts b/test/integration/client-side-encryption/driver.test.ts index 991b93491d5..8862b6ac41a 100644 --- a/test/integration/client-side-encryption/driver.test.ts +++ b/test/integration/client-side-encryption/driver.test.ts @@ -1047,11 +1047,19 @@ describe('CSOT', function () { }); it('the command should fail due to a timeout error', metadata, async function () { - const { duration, result: error } = await measureDuration(() => - stateMachine - .fetchCollectionInfo(encryptedClient, 'test.test', { a: 1 }, timeoutContext()) - .catch(e => e) - ); + const { duration, result: error } = await measureDuration(async () => { + try { + const cursor = stateMachine.fetchCollectionInfo( + encryptedClient, + 'test.test', + { 
a: 1 }, + timeoutContext() + ); + for await (const doc of cursor) void doc; + } catch (error) { + return error; + } + }); expect(error).to.be.instanceOf(MongoOperationTimeoutError); expect(duration).to.be.within(timeoutMS - 100, timeoutMS + 100); }); @@ -1074,7 +1082,8 @@ describe('CSOT', function () { }); it('the command succeeds', metadata, async function () { - await stateMachine.fetchCollectionInfo(encryptedClient, 'test.test', { a: 1 }); + const cursor = stateMachine.fetchCollectionInfo(encryptedClient, 'test.test', { a: 1 }); + for await (const doc of cursor) void doc; }); } ); diff --git a/test/integration/crud/crud.prose.test.ts b/test/integration/crud/crud.prose.test.ts index 8a0d80cc139..0823dcf5d60 100644 --- a/test/integration/crud/crud.prose.test.ts +++ b/test/integration/crud/crud.prose.test.ts @@ -1032,7 +1032,7 @@ describe('CRUD Prose Spec Tests', () => { }); afterEach(async function () { - await client.close(); + await client?.close(); }); it('raises a client side error', async function () { diff --git a/test/spec/client-side-encryption/etc/data/lookup/key-doc.json b/test/spec/client-side-encryption/etc/data/lookup/key-doc.json new file mode 100644 index 00000000000..566b56c354f --- /dev/null +++ b/test/spec/client-side-encryption/etc/data/lookup/key-doc.json @@ -0,0 +1,30 @@ +{ + "_id": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "keyMaterial": { + "$binary": { + "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==", + "subType": "00" + } + }, + "creationDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "updateDate": { + "$date": { + "$numberLong": "1648914851981" + } + }, + "status": { + "$numberInt": "0" + }, + "masterKey": { + "provider": "local" + } +} diff --git a/test/spec/client-side-encryption/etc/data/lookup/schema-csfle.json b/test/spec/client-side-encryption/etc/data/lookup/schema-csfle.json new file mode 100644 index 00000000000..29ac9ad5da4 --- /dev/null +++ b/test/spec/client-side-encryption/etc/data/lookup/schema-csfle.json @@ -0,0 +1,19 @@ +{ + "properties": { + "csfle": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" +} diff --git a/test/spec/client-side-encryption/etc/data/lookup/schema-csfle2.json b/test/spec/client-side-encryption/etc/data/lookup/schema-csfle2.json new file mode 100644 index 00000000000..3f1c02781c5 --- /dev/null +++ b/test/spec/client-side-encryption/etc/data/lookup/schema-csfle2.json @@ -0,0 +1,19 @@ +{ + "properties": { + "csfle2": { + "encrypt": { + "keyId": [ + { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + } + ], + "bsonType": "string", + "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic" + } + } + }, + "bsonType": "object" +} diff --git a/test/spec/client-side-encryption/etc/data/lookup/schema-qe.json b/test/spec/client-side-encryption/etc/data/lookup/schema-qe.json new file mode 100644 index 00000000000..9428ea1b458 --- /dev/null +++ b/test/spec/client-side-encryption/etc/data/lookup/schema-qe.json @@ -0,0 +1,20 @@ +{ + "escCollection": "enxcol_.qe.esc", + "ecocCollection": "enxcol_.qe.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": 
"EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "qe", + "bsonType": "string", + "queries": { + "queryType": "equality", + "contention": 0 + } + } + ] +} diff --git a/test/spec/client-side-encryption/etc/data/lookup/schema-qe2.json b/test/spec/client-side-encryption/etc/data/lookup/schema-qe2.json new file mode 100644 index 00000000000..77d5bd37cbb --- /dev/null +++ b/test/spec/client-side-encryption/etc/data/lookup/schema-qe2.json @@ -0,0 +1,20 @@ +{ + "escCollection": "enxcol_.qe2.esc", + "ecocCollection": "enxcol_.qe2.ecoc", + "fields": [ + { + "keyId": { + "$binary": { + "base64": "EjRWeBI0mHYSNBI0VniQEg==", + "subType": "04" + } + }, + "path": "qe2", + "bsonType": "string", + "queries": { + "queryType": "equality", + "contention": 0 + } + } + ] +} diff --git a/test/tools/runner/config.ts b/test/tools/runner/config.ts index 96d1f677a66..5385ace8cc2 100644 --- a/test/tools/runner/config.ts +++ b/test/tools/runner/config.ts @@ -13,6 +13,7 @@ import { type WriteConcernSettings } from '../../mongodb'; import { getEnvironmentalOptions } from '../utils'; +import { type Filter } from './filters/filter'; interface ProxyParams { proxyHost?: string; @@ -85,6 +86,7 @@ export class TestConfiguration { serverApi?: ServerApi; activeResources: number; isSrv: boolean; + filters: Record; constructor( private uri: string, @@ -129,6 +131,11 @@ export class TestConfiguration { password: url.password }; } + + this.filters = Object.fromEntries( + context.filters.map(filter => [filter.constructor.name, filter]) + ); + if (context.serverlessCredentials) { const { username, password } = context.serverlessCredentials; this.options.auth = { username, password, authSource: 'admin' }; diff --git a/test/tools/runner/filters/filter.ts b/test/tools/runner/filters/filter.ts index b03ad83d5e9..6251cf44c8c 100644 --- a/test/tools/runner/filters/filter.ts +++ b/test/tools/runner/filters/filter.ts @@ -1,5 +1,3 @@ -import { type Test } from 'mocha'; - import { type MongoClient } from '../../../mongodb'; export abstract class Filter { @@ -7,5 +5,5 @@ export abstract class Filter { return; } - abstract filter(test: Test): string | boolean; + abstract filter(test: { metadata?: MongoDBMetadataUI }): string | boolean; } diff --git a/test/tools/runner/hooks/configuration.ts b/test/tools/runner/hooks/configuration.ts index ee31fc506f3..d6c4100f339 100644 --- a/test/tools/runner/hooks/configuration.ts +++ b/test/tools/runner/hooks/configuration.ts @@ -51,20 +51,22 @@ async function initializeFilters(client): Promise> { return {}; } initializedFilters = true; - const context = {}; - - for (const filter of [ - new ApiVersionFilter(), - new AuthFilter(), - new ClientSideEncryptionFilter(), - new GenericPredicateFilter(), - new IDMSMockServerFilter(), - new MongoDBTopologyFilter(), - new MongoDBVersionFilter(), - new NodeVersionFilter(), - new OSFilter(), - new ServerlessFilter() - ]) { + const context = { + filters: [ + new ApiVersionFilter(), + new AuthFilter(), + new ClientSideEncryptionFilter(), + new GenericPredicateFilter(), + new IDMSMockServerFilter(), + new MongoDBTopologyFilter(), + new MongoDBVersionFilter(), + new NodeVersionFilter(), + new OSFilter(), + new ServerlessFilter() + ] + }; + + for (const filter of context.filters) { filters.push(filter); await filter.initializeFilter(client, context); } diff --git a/test/tools/spec-runner/index.js b/test/tools/spec-runner/index.js index f312990137c..62ee50ff71e 100644 --- a/test/tools/spec-runner/index.js +++ b/test/tools/spec-runner/index.js @@ -15,7 +15,6 @@ 
const { HEARTBEAT_EVENTS } = require('../../mongodb'); const { isAnyRequirementSatisfied } = require('../unified-spec-runner/unified-utils'); -const { ClientSideEncryptionFilter } = require('../runner/filters/client_encryption_filter'); const { getCSFLEKMSProviders } = require('../../csfle-kms-providers'); // Promise.try alternative https://stackoverflow.com/questions/60624081/promise-try-without-bluebird/60624164?noredirect=1#comment107255389_60624164 @@ -153,7 +152,7 @@ function legacyRunOnToRunOnRequirement(runOn) { } /** - * @param {((test: { description: string }) => true | string)?} filter a function that returns true for any tests that should run, false otherwise. + * @param {((test: { description: string }, configuration: TestConfiguration) => true | string)?} filter a function that returns true for any tests that should run, false otherwise. */ function generateTopologyTests(testSuites, testContext, filter) { for (const testSuite of testSuites) { @@ -198,10 +197,8 @@ function generateTopologyTests(testSuites, testContext, filter) { let csfleFilterError = null; if (shouldRun && testContext.requiresCSFLE) { - const csfleFilter = new ClientSideEncryptionFilter(); - await csfleFilter.initializeFilter(null, {}); try { - const filterResult = csfleFilter.filter({ + const filterResult = this.configuration.filters.ClientSideEncryptionFilter.filter({ metadata: { requires: { clientSideEncryption: true } } }); if (typeof filterResult === 'string') { diff --git a/test/unit/client-side-encryption/auto_encrypter.test.ts b/test/unit/client-side-encryption/auto_encrypter.test.ts index 79bc321b802..816b3a6cb93 100644 --- a/test/unit/client-side-encryption/auto_encrypter.test.ts +++ b/test/unit/client-side-encryption/auto_encrypter.test.ts @@ -63,7 +63,10 @@ describe('AutoEncrypter', function () { return Promise.resolve(); }); - sandbox.stub(StateMachine.prototype, 'fetchCollectionInfo').resolves(MOCK_COLLINFO_RESPONSE); + const iterator = (async function* () { + yield BSON.deserialize(MOCK_COLLINFO_RESPONSE); + })(); + sandbox.stub(StateMachine.prototype, 'fetchCollectionInfo').returns(iterator); sandbox.stub(StateMachine.prototype, 'markCommand').callsFake(() => { if (ENABLE_LOG_TEST) { diff --git a/test/unit/client-side-encryption/state_machine.test.ts b/test/unit/client-side-encryption/state_machine.test.ts index 3d6a92765a8..1f43b57007b 100644 --- a/test/unit/client-side-encryption/state_machine.test.ts +++ b/test/unit/client-side-encryption/state_machine.test.ts @@ -580,11 +580,21 @@ describe('StateMachine', function () { serverSelectionTimeoutMS: 30000 }); await sleep(300); - await stateMachine - .fetchCollectionInfo(client, 'keyVault', BSON.serialize({ a: 1 }), { - timeoutContext: context - }) - .catch(e => squashError(e)); + + try { + const cursor = stateMachine.fetchCollectionInfo( + client, + 'keyVault', + BSON.serialize({ a: 1 }), + { + timeoutContext: context + } + ); + for await (const doc of cursor) void doc; + } catch { + // ignore + } + const [_filter, { timeoutContext }] = listCollectionsSpy.getCalls()[0].args; expect(timeoutContext).to.exist; expect(timeoutContext.timeoutContext).to.equal(context); @@ -596,9 +606,16 @@ describe('StateMachine', function () { 'when StateMachine.fetchCollectionInfo() is not passed a `CSOTimeoutContext`', function () { it('no timeoutContext is provided to listCollections', async function () { - await stateMachine - .fetchCollectionInfo(client, 'keyVault', BSON.serialize({ a: 1 })) - .catch(e => squashError(e)); + try { + const cursor = 
stateMachine.fetchCollectionInfo( + client, + 'keyVault', + BSON.serialize({ a: 1 }) + ); + for await (const doc of cursor) void doc; + } catch { + // ignore + } const [_filter, { timeoutContext }] = listCollectionsSpy.getCalls()[0].args; expect(timeoutContext).not.to.exist; });