diff --git a/package-lock.json b/package-lock.json index 7f1278b8e80..bf4b17cd966 100644 --- a/package-lock.json +++ b/package-lock.json @@ -9,7 +9,7 @@ "version": "6.4.0", "license": "Apache-2.0", "dependencies": { - "@mongodb-js/saslprep": "^1.1.0", + "@mongodb-js/saslprep": "^1.1.5", "bson": "^6.4.0", "mongodb-connection-string-url": "^3.0.0" }, @@ -1670,9 +1670,9 @@ } }, "node_modules/@mongodb-js/saslprep": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@mongodb-js/saslprep/-/saslprep-1.1.0.tgz", - "integrity": "sha512-Xfijy7HvfzzqiOAhAepF4SGN5e9leLkMvg/OPOF97XemjfVCYN/oWa75wnkc6mltMSTwY+XlbhWgUOJmkFspSw==", + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/@mongodb-js/saslprep/-/saslprep-1.1.5.tgz", + "integrity": "sha512-XLNOMH66KhJzUJNwT/qlMnS4WsNDWD5ASdyaSH3EtK+F4r/CFGa3jT4GNi4mfOitGvWXtdLgQJkQjxSVrio+jA==", "dependencies": { "sparse-bitfield": "^3.0.3" } diff --git a/package.json b/package.json index c7982579be7..753881d8e97 100644 --- a/package.json +++ b/package.json @@ -25,7 +25,7 @@ "email": "dbx-node@mongodb.com" }, "dependencies": { - "@mongodb-js/saslprep": "^1.1.0", + "@mongodb-js/saslprep": "^1.1.5", "bson": "^6.4.0", "mongodb-connection-string-url": "^3.0.0" }, diff --git a/src/cmap/connect.ts b/src/cmap/connect.ts index 54b00a7031b..24022c9d182 100644 --- a/src/cmap/connect.ts +++ b/src/cmap/connect.ts @@ -25,7 +25,6 @@ import { type ConnectionOptions, CryptoConnection } from './connection'; -import type { ClientMetadata } from './handshake/client_metadata'; import { MAX_SUPPORTED_SERVER_VERSION, MAX_SUPPORTED_WIRE_VERSION, @@ -183,7 +182,7 @@ export interface HandshakeDocument extends Document { ismaster?: boolean; hello?: boolean; helloOk?: boolean; - client: ClientMetadata; + client: Document; compression: string[]; saslSupportedMechs?: string; loadBalanced?: boolean; @@ -200,11 +199,12 @@ export async function prepareHandshakeDocument( const options = authContext.options; const compressors = options.compressors ? options.compressors : []; const { serverApi } = authContext.connection; + const clientMetadata: Document = await options.extendedMetadata; const handshakeDoc: HandshakeDocument = { [serverApi?.version || options.loadBalanced === true ? 'hello' : LEGACY_HELLO_COMMAND]: 1, helloOk: true, - client: options.metadata, + client: clientMetadata, compression: compressors }; @@ -319,7 +319,6 @@ export async function makeSocket(options: MakeConnectionOptions): Promise; + /** @internal */ mongoLogger?: MongoLogger | undefined; } @@ -180,18 +181,18 @@ export class Connection extends TypedEventEmitter { * Once connection is established, command logging can log events (if enabled) */ public established: boolean; + /** Indicates that the connection (including underlying TCP socket) has been closed. 
*/ + public closed = false; private lastUseTime: number; private clusterTime: Document | null = null; + private error: Error | null = null; + private dataEvents: AsyncGenerator | null = null; private readonly socketTimeoutMS: number; private readonly monitorCommands: boolean; private readonly socket: Stream; - private readonly controller: AbortController; - private readonly signal: AbortSignal; private readonly messageStream: Readable; - private readonly socketWrite: (buffer: Uint8Array) => Promise; - private readonly aborted: Promise; /** @event */ static readonly COMMAND_STARTED = COMMAND_STARTED; @@ -211,6 +212,7 @@ export class Connection extends TypedEventEmitter { constructor(stream: Stream, options: ConnectionOptions) { super(); + this.socket = stream; this.id = options.id; this.address = streamIdentifier(stream, options); this.socketTimeoutMS = options.socketTimeoutMS ?? 0; @@ -223,39 +225,12 @@ export class Connection extends TypedEventEmitter { this.generation = options.generation; this.lastUseTime = now(); - this.socket = stream; - - // TODO: Remove signal from connection layer - this.controller = new AbortController(); - const { signal } = this.controller; - this.signal = signal; - const { promise: aborted, reject } = promiseWithResolvers(); - aborted.then(undefined, () => null); // Prevent unhandled rejection - this.signal.addEventListener( - 'abort', - function onAbort() { - reject(signal.reason); - }, - { once: true } - ); - this.aborted = aborted; - this.messageStream = this.socket .on('error', this.onError.bind(this)) .pipe(new SizedMessageTransform({ connection: this })) .on('error', this.onError.bind(this)); this.socket.on('close', this.onClose.bind(this)); this.socket.on('timeout', this.onTimeout.bind(this)); - - const socketWrite = promisify(this.socket.write.bind(this.socket)); - this.socketWrite = async buffer => { - return Promise.race([socketWrite(buffer), this.aborted]); - }; - } - - /** Indicates that the connection (including underlying TCP socket) has been closed. */ - public get closed(): boolean { - return this.signal.aborted; } public get hello() { @@ -306,7 +281,7 @@ export class Connection extends TypedEventEmitter { this.lastUseTime = now(); } - public onError(error?: Error) { + public onError(error: Error) { this.cleanup(error); } @@ -349,13 +324,15 @@ export class Connection extends TypedEventEmitter { * * This method does nothing if the connection is already closed. */ - private cleanup(error?: Error): void { + private cleanup(error: Error): void { if (this.closed) { return; } this.socket.destroy(); - this.controller.abort(error); + this.error = error; + this.dataEvents?.throw(error).then(undefined, () => null); // squash unhandled rejection + this.closed = true; this.emit(Connection.CLOSE); } @@ -596,7 +573,7 @@ export class Connection extends TypedEventEmitter { } private throwIfAborted() { - this.signal.throwIfAborted(); + if (this.error) throw this.error; } /** @@ -619,7 +596,8 @@ export class Connection extends TypedEventEmitter { const buffer = Buffer.concat(await finalCommand.toBin()); - return this.socketWrite(buffer); + if (this.socket.write(buffer)) return; + return once(this.socket, 'drain'); } /** @@ -632,13 +610,19 @@ export class Connection extends TypedEventEmitter { * Note that `for-await` loops call `return` automatically when the loop is exited. 
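   * The active generator is tracked in `this.dataEvents` so that `cleanup()` can terminate it early via `.throw()` when the connection errors or closes.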
*/ private async *readMany(): AsyncGenerator { - for await (const message of onData(this.messageStream, { signal: this.signal })) { - const response = await decompressResponse(message); - yield response; + try { + this.dataEvents = onData(this.messageStream); + for await (const message of this.dataEvents) { + const response = await decompressResponse(message); + yield response; - if (!response.moreToCome) { - return; + if (!response.moreToCome) { + return; + } } + } finally { + this.dataEvents = null; + this.throwIfAborted(); } } } diff --git a/src/cmap/connection_pool.ts b/src/cmap/connection_pool.ts index 435b66936d5..64b89ee1200 100644 --- a/src/cmap/connection_pool.ts +++ b/src/cmap/connection_pool.ts @@ -233,8 +233,7 @@ export class ConnectionPool extends TypedEventEmitter { maxIdleTimeMS: options.maxIdleTimeMS ?? 0, waitQueueTimeoutMS: options.waitQueueTimeoutMS ?? 0, minPoolSizeCheckFrequencyMS: options.minPoolSizeCheckFrequencyMS ?? 100, - autoEncrypter: options.autoEncrypter, - metadata: options.metadata + autoEncrypter: options.autoEncrypter }); if (this.options.minPoolSize > this.options.maxPoolSize) { diff --git a/src/cmap/handshake/client_metadata.ts b/src/cmap/handshake/client_metadata.ts index fb1ba40b14e..c9589f6e009 100644 --- a/src/cmap/handshake/client_metadata.ts +++ b/src/cmap/handshake/client_metadata.ts @@ -1,7 +1,8 @@ +import { promises as fs } from 'fs'; import * as os from 'os'; import * as process from 'process'; -import { BSON, Int32 } from '../../bson'; +import { BSON, type Document, Int32 } from '../../bson'; import { MongoInvalidArgumentError } from '../../error'; import type { MongoOptions } from '../../mongo_client'; @@ -71,13 +72,13 @@ export class LimitedSizeDocument { return true; } - toObject(): ClientMetadata { + toObject(): Document { return BSON.deserialize(BSON.serialize(this.document), { promoteLongs: false, promoteBuffers: false, promoteValues: false, useBigInt64: false - }) as ClientMetadata; + }); } } @@ -152,8 +153,57 @@ export function makeClientMetadata(options: MakeClientMetadataOptions): ClientMe } } } + return metadataDocument.toObject() as ClientMetadata; +} + +let dockerPromise: Promise; +/** @internal */ +async function getContainerMetadata() { + const containerMetadata: Record = {}; + dockerPromise ??= fs.access('/.dockerenv').then( + () => true, + () => false + ); + const isDocker = await dockerPromise; + + const { KUBERNETES_SERVICE_HOST = '' } = process.env; + const isKubernetes = KUBERNETES_SERVICE_HOST.length > 0 ? true : false; + + if (isDocker) containerMetadata.runtime = 'docker'; + if (isKubernetes) containerMetadata.orchestrator = 'kubernetes'; + + return containerMetadata; +} + +/** + * @internal + * Re-add each metadata value. + * Attempt to add new env container metadata, but keep old data if it does not fit. 
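+ * The rebuilt document is capped at 512 bytes via LimitedSizeDocument, the size limit the handshake applies to client metadata.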
+ */ +export async function addContainerMetadata(originalMetadata: ClientMetadata) { + const containerMetadata = await getContainerMetadata(); + if (Object.keys(containerMetadata).length === 0) return originalMetadata; + + const extendedMetadata = new LimitedSizeDocument(512); + + const extendedEnvMetadata = { ...originalMetadata?.env, container: containerMetadata }; + + for (const [key, val] of Object.entries(originalMetadata)) { + if (key !== 'env') { + extendedMetadata.ifItFitsItSits(key, val); + } else { + if (!extendedMetadata.ifItFitsItSits('env', extendedEnvMetadata)) { + // add in old data if newer / extended metadata does not fit + extendedMetadata.ifItFitsItSits('env', val); + } + } + } + + if (!('env' in originalMetadata)) { + extendedMetadata.ifItFitsItSits('env', extendedEnvMetadata); + } - return metadataDocument.toObject(); + return extendedMetadata.toObject(); } /** diff --git a/src/cmap/wire_protocol/on_data.ts b/src/cmap/wire_protocol/on_data.ts index 04c82f709d3..b99c950d96f 100644 --- a/src/cmap/wire_protocol/on_data.ts +++ b/src/cmap/wire_protocol/on_data.ts @@ -16,11 +16,9 @@ type PendingPromises = Omit< * https://nodejs.org/api/events.html#eventsonemitter-eventname-options * * Returns an AsyncIterator that iterates each 'data' event emitted from emitter. - * It will reject upon an error event or if the provided signal is aborted. + * It will reject upon an error event. */ -export function onData(emitter: EventEmitter, options: { signal: AbortSignal }) { - const signal = options.signal; - +export function onData(emitter: EventEmitter) { // Setup pending events and pending promise lists /** * When the caller has not yet called .next(), we store the @@ -89,19 +87,8 @@ export function onData(emitter: EventEmitter, options: { signal: AbortSignal }) emitter.on('data', eventHandler); emitter.on('error', errorHandler); - if (signal.aborted) { - // If the signal is aborted, set up the first .next() call to be a rejection - queueMicrotask(abortListener); - } else { - signal.addEventListener('abort', abortListener, { once: true }); - } - return iterator; - function abortListener() { - errorHandler(signal.reason); - } - function eventHandler(value: Buffer) { const promise = unconsumedPromises.shift(); if (promise != null) promise.resolve({ value, done: false }); @@ -119,7 +106,6 @@ export function onData(emitter: EventEmitter, options: { signal: AbortSignal }) // Adding event handlers emitter.off('data', eventHandler); emitter.off('error', errorHandler); - signal.removeEventListener('abort', abortListener); finished = true; const doneResult = { value: undefined, done: finished } as const; diff --git a/src/connection_string.ts b/src/connection_string.ts index e6ae0b82b2f..152a4be6447 100644 --- a/src/connection_string.ts +++ b/src/connection_string.ts @@ -5,7 +5,7 @@ import { URLSearchParams } from 'url'; import type { Document } from './bson'; import { MongoCredentials } from './cmap/auth/mongo_credentials'; import { AUTH_MECHS_AUTH_SRC_EXTERNAL, AuthMechanism } from './cmap/auth/providers'; -import { makeClientMetadata } from './cmap/handshake/client_metadata'; +import { addContainerMetadata, makeClientMetadata } from './cmap/handshake/client_metadata'; import { Compressor, type CompressorName } from './cmap/wire_protocol/compression'; import { Encrypter } from './encrypter'; import { @@ -552,6 +552,10 @@ export function parseOptions( mongoOptions.metadata = makeClientMetadata(mongoOptions); + mongoOptions.extendedMetadata = addContainerMetadata(mongoOptions.metadata).catch(() 
=> { + /* rejections will be handled later */ + }); + return mongoOptions; } diff --git a/src/error.ts b/src/error.ts index b488d0d5d75..6f91f246346 100644 --- a/src/error.ts +++ b/src/error.ts @@ -200,6 +200,8 @@ export class MongoError extends Error { * @category Error */ export class MongoServerError extends MongoError { + /** Raw error result document returned by server. */ + errorResponse: ErrorDescription; codeName?: string; writeConcernError?: Document; errInfo?: Document; @@ -223,9 +225,17 @@ export class MongoServerError extends MongoError { this[kErrorLabels] = new Set(message.errorLabels); } + this.errorResponse = message; + for (const name in message) { - if (name !== 'errorLabels' && name !== 'errmsg' && name !== 'message') + if ( + name !== 'errorLabels' && + name !== 'errmsg' && + name !== 'message' && + name !== 'errorResponse' + ) { this[name] = message[name]; + } } } diff --git a/src/mongo_client.ts b/src/mongo_client.ts index be039944a4f..5ab24eee9bf 100644 --- a/src/mongo_client.ts +++ b/src/mongo_client.ts @@ -827,6 +827,8 @@ export interface MongoOptions dbName: string; metadata: ClientMetadata; /** @internal */ + extendedMetadata: Promise; + /** @internal */ autoEncrypter?: AutoEncrypter; proxyHost?: string; proxyPort?: number; diff --git a/src/mongo_logger.ts b/src/mongo_logger.ts index 27fcbf8d308..be345eaeff7 100644 --- a/src/mongo_logger.ts +++ b/src/mongo_logger.ts @@ -220,7 +220,8 @@ export function createStdioLogger(stream: { }): MongoDBLogWritable { return { write: promisify((log: Log, cb: (error?: Error) => void): unknown => { - stream.write(inspect(log, { compact: true, breakLength: Infinity }), 'utf-8', cb); + const logLine = inspect(log, { compact: true, breakLength: Infinity }); + stream.write(`${logLine}\n`, 'utf-8', cb); return; }) }; diff --git a/src/sdam/server.ts b/src/sdam/server.ts index 13cadd9f7fc..f1a0bf1d985 100644 --- a/src/sdam/server.ts +++ b/src/sdam/server.ts @@ -171,7 +171,7 @@ export class Server extends TypedEventEmitter { this.monitor.on(event, (e: any) => this.emit(event, e)); } - this.monitor.on('resetServer', (error: MongoError) => markServerUnknown(this, error)); + this.monitor.on('resetServer', (error: MongoServerError) => markServerUnknown(this, error)); this.monitor.on(Server.SERVER_HEARTBEAT_SUCCEEDED, (event: ServerHeartbeatSucceededEvent) => { this.emit( Server.DESCRIPTION_RECEIVED, @@ -369,7 +369,7 @@ export class Server extends TypedEventEmitter { // clear for the specific service id. 
if (!this.loadBalanced) { error.addErrorLabel(MongoErrorLabel.ResetPool); - markServerUnknown(this, error); + markServerUnknown(this, error as MongoServerError); } else if (connection) { this.pool.clear({ serviceId: connection.serviceId }); } @@ -385,7 +385,7 @@ export class Server extends TypedEventEmitter { if (shouldClearPool) { error.addErrorLabel(MongoErrorLabel.ResetPool); } - markServerUnknown(this, error); + markServerUnknown(this, error as MongoServerError); process.nextTick(() => this.requestCheck()); } } diff --git a/src/sdam/topology.ts b/src/sdam/topology.ts index 400db63870f..68d85657387 100644 --- a/src/sdam/topology.ts +++ b/src/sdam/topology.ts @@ -158,6 +158,7 @@ export interface TopologyOptions extends BSONSerializeOptions, ServerOptions { directConnection: boolean; loadBalanced: boolean; metadata: ClientMetadata; + extendedMetadata: Promise; serverMonitoringMode: ServerMonitoringMode; /** MongoDB server API version */ serverApi?: ServerApi; diff --git a/src/sdam/topology_description.ts b/src/sdam/topology_description.ts index 380e6c8f0c9..f2fafaf87b7 100644 --- a/src/sdam/topology_description.ts +++ b/src/sdam/topology_description.ts @@ -313,7 +313,7 @@ export class TopologyDescription { ); if (descriptionsWithError.length > 0) { - return descriptionsWithError[0].error; + return descriptionsWithError[0].error as MongoServerError; } return null; diff --git a/src/utils.ts b/src/utils.ts index 8020d508f83..4a6f7a4e8c5 100644 --- a/src/utils.ts +++ b/src/utils.ts @@ -1,5 +1,6 @@ import * as crypto from 'crypto'; import type { SrvRecord } from 'dns'; +import { type EventEmitter } from 'events'; import * as http from 'http'; import { clearTimeout, setTimeout } from 'timers'; import * as url from 'url'; @@ -1295,3 +1296,27 @@ export function promiseWithResolvers() { } export const randomBytes = promisify(crypto.randomBytes); + +/** + * Replicates the events.once helper. + * + * Removes unused signal logic and It **only** supports 0 or 1 argument events. 
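+ *
+ * Example (mirrors how `Connection.writeCommand` uses it in this change):
+ * `if (!socket.write(buffer)) await once(socket, 'drain');`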
+ * + * @param ee - An event emitter that may emit `ev` + * @param name - An event name to wait for + */ +export async function once(ee: EventEmitter, name: string): Promise { + const { promise, resolve, reject } = promiseWithResolvers(); + const onEvent = (data: T) => resolve(data); + const onError = (error: Error) => reject(error); + + ee.once(name, onEvent).once('error', onError); + try { + const res = await promise; + ee.off('error', onError); + return res; + } catch (error) { + ee.off(name, onEvent); + throw error; + } +} diff --git a/test/integration/auth/mongodb_aws.test.ts b/test/integration/auth/mongodb_aws.test.ts index cc9e17881fc..d3f6933c35d 100644 --- a/test/integration/auth/mongodb_aws.test.ts +++ b/test/integration/auth/mongodb_aws.test.ts @@ -88,15 +88,17 @@ describe('MONGODB-AWS', function () { }); describe('with missing aws token', () => { - let awsSessionToken; + let awsSessionToken: string | undefined; - beforeEach(function () { + beforeEach(() => { awsSessionToken = process.env.AWS_SESSION_TOKEN; delete process.env.AWS_SESSION_TOKEN; }); - afterEach(async () => { - process.env.AWS_SESSION_TOKEN = awsSessionToken; + afterEach(() => { + if (awsSessionToken != null) { + process.env.AWS_SESSION_TOKEN = awsSessionToken; + } }); it('should not throw an exception when aws token is missing', async function () { diff --git a/test/integration/collection-management/collection_management.spec.test.js b/test/integration/collection-management/collection_management.spec.test.js index 1c78969d67d..9242b24133d 100644 --- a/test/integration/collection-management/collection_management.spec.test.js +++ b/test/integration/collection-management/collection_management.spec.test.js @@ -4,7 +4,10 @@ const { loadSpecTests } = require('../../spec/index'); const { runUnifiedSuite } = require('../../tools/unified-spec-runner/runner'); // The Node driver does not have a Collection.modifyCollection helper. 
-const SKIPPED_TESTS = ['modifyCollection to changeStreamPreAndPostImages enabled']; +const SKIPPED_TESTS = [ + 'modifyCollection to changeStreamPreAndPostImages enabled', + 'modifyCollection prepareUnique violations are accessible' +]; describe('Collection management unified spec tests', function () { runUnifiedSuite(loadSpecTests('collection-management'), ({ description }) => diff --git a/test/integration/connection-monitoring-and-pooling/connection.test.ts b/test/integration/connection-monitoring-and-pooling/connection.test.ts index a1e8f1f9571..421a9e02bbf 100644 --- a/test/integration/connection-monitoring-and-pooling/connection.test.ts +++ b/test/integration/connection-monitoring-and-pooling/connection.test.ts @@ -1,6 +1,11 @@ import { expect } from 'chai'; +import { type EventEmitter, once } from 'events'; +import * as sinon from 'sinon'; +import { setTimeout } from 'timers'; import { + addContainerMetadata, + Binary, connect, Connection, type ConnectionOptions, @@ -14,7 +19,9 @@ import { ServerHeartbeatStartedEvent, Topology } from '../../mongodb'; +import * as mock from '../../tools/mongodb-mock/index'; import { skipBrokenAuthTestBeforeEachHook } from '../../tools/runner/hooks/configuration'; +import { getSymbolFrom, sleep } from '../../tools/utils'; import { assert as test, setupDatabase } from '../shared'; const commonConnectOptions = { @@ -50,7 +57,8 @@ describe('Connection', function () { ...commonConnectOptions, connectionType: Connection, ...this.configuration.options, - metadata: makeClientMetadata({ driverInfo: {} }) + metadata: makeClientMetadata({ driverInfo: {} }), + extendedMetadata: addContainerMetadata(makeClientMetadata({ driverInfo: {} })) }; let conn; @@ -72,7 +80,8 @@ describe('Connection', function () { connectionType: Connection, ...this.configuration.options, monitorCommands: true, - metadata: makeClientMetadata({ driverInfo: {} }) + metadata: makeClientMetadata({ driverInfo: {} }), + extendedMetadata: addContainerMetadata(makeClientMetadata({ driverInfo: {} })) }; let conn; @@ -197,6 +206,84 @@ describe('Connection', function () { client.connect(); }); + context( + 'when a large message is written to the socket', + { requires: { topology: 'single', auth: 'disabled' } }, + () => { + let client, mockServer: import('../../tools/mongodb-mock/src/server').MockServer; + + beforeEach(async function () { + mockServer = await mock.createServer(); + + mockServer + .addMessageHandler('insert', req => { + setTimeout(() => { + req.reply({ ok: 1 }); + }, 800); + }) + .addMessageHandler('hello', req => { + req.reply(Object.assign({}, mock.HELLO)); + }) + .addMessageHandler(LEGACY_HELLO_COMMAND, req => { + req.reply(Object.assign({}, mock.HELLO)); + }); + + client = new MongoClient(`mongodb://${mockServer.uri()}`, { + minPoolSize: 1, + maxPoolSize: 1 + }); + }); + + afterEach(async function () { + await client.close(); + mockServer.destroy(); + sinon.restore(); + }); + + it('waits for an async drain event because the write was buffered', async () => { + const connectionReady = once(client, 'connectionReady'); + await client.connect(); + await connectionReady; + + // Get the only connection + const pool = [...client.topology.s.servers.values()][0].pool; + + const connections = pool[getSymbolFrom(pool, 'connections')]; + expect(connections).to.have.lengthOf(1); + + const connection = connections.first(); + const socket: EventEmitter = connection.socket; + + // Spy on the socket event listeners + const addedListeners: string[] = []; + const removedListeners: string[] = []; + 
socket + .on('removeListener', name => removedListeners.push(name)) + .on('newListener', name => addedListeners.push(name)); + + // Make server sockets block + for (const s of mockServer.sockets) s.pause(); + + const insert = client + .db('test') + .collection('test') + // Anything above 16Kb should work I think (10mb to be extra sure) + .insertOne({ a: new Binary(Buffer.alloc(10 * (2 ** 10) ** 2), 127) }); + + // Sleep a bit and unblock server sockets + await sleep(10); + for (const s of mockServer.sockets) s.resume(); + + // Let the operation finish + await insert; + + // Ensure that we used the drain event for this write + expect(addedListeners).to.deep.equal(['drain', 'error']); + expect(removedListeners).to.deep.equal(['drain', 'error']); + }); + } + ); + context('when connecting with a username and password', () => { let utilClient: MongoClient; let client: MongoClient; diff --git a/test/integration/crud/crud.spec.test.js b/test/integration/crud/crud.spec.test.js index ce145c038c5..a7fd404c8bd 100644 --- a/test/integration/crud/crud.spec.test.js +++ b/test/integration/crud/crud.spec.test.js @@ -425,6 +425,10 @@ describe('CRUD spec v1', function () { } }); +// TODO(NODE-5998) - The Node driver UTR does not have a Collection.modifyCollection helper. +const SKIPPED_TESTS = ['findOneAndUpdate document validation errInfo is accessible']; describe('CRUD unified', function () { - runUnifiedSuite(loadSpecTests(path.join('crud', 'unified'))); + runUnifiedSuite(loadSpecTests(path.join('crud', 'unified')), ({ description }) => + SKIPPED_TESTS.includes(description) ? `the Node driver does not have a collMod helper.` : false + ); }); diff --git a/test/integration/node-specific/resource_clean_up.test.ts b/test/integration/node-specific/resource_clean_up.test.ts index 0d330914c0f..e370986a264 100644 --- a/test/integration/node-specific/resource_clean_up.test.ts +++ b/test/integration/node-specific/resource_clean_up.test.ts @@ -1,5 +1,8 @@ +import * as v8 from 'node:v8'; + import { expect } from 'chai'; +import { sleep } from '../../tools/utils'; import { runScript } from './resource_tracking_script_builder'; /** @@ -86,4 +89,34 @@ describe('Driver Resources', () => { }); }); }); + + context('when 100s of operations are executed and complete', () => { + beforeEach(function () { + if (this.currentTest && typeof v8.queryObjects !== 'function') { + this.currentTest.skipReason = 'Test requires v8.queryObjects API to count Promises'; + this.currentTest?.skip(); + } + }); + + let client; + beforeEach(async function () { + client = this.configuration.newClient(); + }); + + afterEach(async function () { + await client.close(); + }); + + it('does not leave behind additional promises', async () => { + const test = client.db('test').collection('test'); + const promiseCountBefore = v8.queryObjects(Promise, { format: 'count' }); + for (let i = 0; i < 100; i++) { + await test.findOne(); + } + await sleep(10); + const promiseCountAfter = v8.queryObjects(Promise, { format: 'count' }); + + expect(promiseCountAfter).to.be.within(promiseCountBefore - 5, promiseCountBefore + 5); + }); + }); }); diff --git a/test/integration/transactions-convenient-api/transactions-convenient-api.spec.test.ts b/test/integration/transactions-convenient-api/transactions-convenient-api.spec.test.ts index e9ca8f42d3f..8b4c0ee3d5f 100644 --- a/test/integration/transactions-convenient-api/transactions-convenient-api.spec.test.ts +++ b/test/integration/transactions-convenient-api/transactions-convenient-api.spec.test.ts @@ -5,19 +5,8 @@ import 
{ runUnifiedSuite } from '../../tools/unified-spec-runner/runner'; const SKIPPED_TESTS = [ 'callback succeeds after multiple connection errors', - 'callback is not retried after non-transient error', 'callback is not retried after non-transient error (DuplicateKeyError)', - 'withTransaction succeeds if callback aborts', - 'unpin after transient error within a transaction', - 'withTransaction succeeds if callback commits', - 'withTransaction still succeeds if callback aborts and runs extra op', - 'withTransaction still succeeds if callback commits and runs extra op', - 'withTransaction commits after callback returns (second transaction)', - 'withTransaction commits after callback returns', - 'withTransaction and no transaction options set', - 'withTransaction inherits transaction options from defaultTransactionOptions', - 'withTransaction explicit transaction options override defaultTransactionOptions', - 'withTransaction explicit transaction options' + 'withTransaction succeeds if callback aborts' ]; describe('Transactions Convenient API Spec Unified Tests', function () { @@ -33,7 +22,7 @@ describe('Transactions Convenient API Spec Unified Tests', function () { runUnifiedSuite(loadSpecTests(path.join('transactions-convenient-api', 'unified')), test => { return SKIPPED_TESTS.includes(test.description) - ? 'TODO(NODE-5855/DRIVERS-2816): Skipping failing transaction tests' + ? 'TODO(NODE-5855): Skipping failing transaction tests' : false; }); }); diff --git a/test/spec/collection-management/modifyCollection-errorResponse.json b/test/spec/collection-management/modifyCollection-errorResponse.json new file mode 100644 index 00000000000..aea71eb08f0 --- /dev/null +++ b/test/spec/collection-management/modifyCollection-errorResponse.json @@ -0,0 +1,118 @@ +{ + "description": "modifyCollection-errorResponse", + "schemaVersion": "1.12", + "createEntities": [ + { + "client": { + "id": "client0", + "observeEvents": [ + "commandStartedEvent" + ] + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "collMod-tests" + } + }, + { + "collection": { + "id": "collection0", + "database": "database0", + "collectionName": "test" + } + } + ], + "initialData": [ + { + "collectionName": "test", + "databaseName": "collMod-tests", + "documents": [ + { + "_id": 1, + "x": 1 + }, + { + "_id": 2, + "x": 1 + } + ] + } + ], + "tests": [ + { + "description": "modifyCollection prepareUnique violations are accessible", + "runOnRequirements": [ + { + "minServerVersion": "5.2" + } + ], + "operations": [ + { + "name": "createIndex", + "object": "collection0", + "arguments": { + "keys": { + "x": 1 + } + } + }, + { + "name": "modifyCollection", + "object": "database0", + "arguments": { + "collection": "test", + "index": { + "keyPattern": { + "x": 1 + }, + "prepareUnique": true + } + } + }, + { + "name": "insertOne", + "object": "collection0", + "arguments": { + "document": { + "_id": 3, + "x": 1 + } + }, + "expectError": { + "errorCode": 11000 + } + }, + { + "name": "modifyCollection", + "object": "database0", + "arguments": { + "collection": "test", + "index": { + "keyPattern": { + "x": 1 + }, + "unique": true + } + }, + "expectError": { + "isClientError": false, + "errorCode": 359, + "errorResponse": { + "violations": [ + { + "ids": [ + 1, + 2 + ] + } + ] + } + } + } + ] + } + ] +} diff --git a/test/spec/collection-management/modifyCollection-errorResponse.yml b/test/spec/collection-management/modifyCollection-errorResponse.yml new file mode 100644 index 00000000000..e61a01211cb --- /dev/null 
+++ b/test/spec/collection-management/modifyCollection-errorResponse.yml @@ -0,0 +1,59 @@ +description: "modifyCollection-errorResponse" + +schemaVersion: "1.12" + +createEntities: + - client: + id: &client0 client0 + observeEvents: [ commandStartedEvent ] + - database: + id: &database0 database0 + client: *client0 + databaseName: &database0Name collMod-tests + - collection: + id: &collection0 collection0 + database: *database0 + collectionName: &collection0Name test + +initialData: &initialData + - collectionName: *collection0Name + databaseName: *database0Name + documents: + - { _id: 1, x: 1 } + - { _id: 2, x: 1 } + +tests: + - description: "modifyCollection prepareUnique violations are accessible" + runOnRequirements: + - minServerVersion: "5.2" # SERVER-61158 + operations: + - name: createIndex + object: *collection0 + arguments: + keys: { x: 1 } + - name: modifyCollection + object: *database0 + arguments: + collection: *collection0Name + index: + keyPattern: { x: 1 } + prepareUnique: true + - name: insertOne + object: *collection0 + arguments: + document: { _id: 3, x: 1 } + expectError: + errorCode: 11000 # DuplicateKey + - name: modifyCollection + object: *database0 + arguments: + collection: *collection0Name + index: + keyPattern: { x: 1 } + unique: true + expectError: + isClientError: false + errorCode: 359 # CannotConvertIndexToUnique + errorResponse: + violations: + - { ids: [ 1, 2 ] } diff --git a/test/spec/crud/unified/aggregate-merge-errorResponse.json b/test/spec/crud/unified/aggregate-merge-errorResponse.json new file mode 100644 index 00000000000..6c7305fd91f --- /dev/null +++ b/test/spec/crud/unified/aggregate-merge-errorResponse.json @@ -0,0 +1,90 @@ +{ + "description": "aggregate-merge-errorResponse", + "schemaVersion": "1.12", + "createEntities": [ + { + "client": { + "id": "client0" + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "crud-tests" + } + }, + { + "collection": { + "id": "collection0", + "database": "database0", + "collectionName": "test" + } + } + ], + "initialData": [ + { + "collectionName": "test", + "databaseName": "crud-tests", + "documents": [ + { + "_id": 1, + "x": 1 + }, + { + "_id": 2, + "x": 1 + } + ] + } + ], + "tests": [ + { + "description": "aggregate $merge DuplicateKey error is accessible", + "runOnRequirements": [ + { + "minServerVersion": "5.1", + "topologies": [ + "single", + "replicaset" + ] + } + ], + "operations": [ + { + "name": "aggregate", + "object": "database0", + "arguments": { + "pipeline": [ + { + "$documents": [ + { + "_id": 2, + "x": 1 + } + ] + }, + { + "$merge": { + "into": "test", + "whenMatched": "fail" + } + } + ] + }, + "expectError": { + "errorCode": 11000, + "errorResponse": { + "keyPattern": { + "_id": 1 + }, + "keyValue": { + "_id": 2 + } + } + } + } + ] + } + ] +} diff --git a/test/spec/crud/unified/aggregate-merge-errorResponse.yml b/test/spec/crud/unified/aggregate-merge-errorResponse.yml new file mode 100644 index 00000000000..5fd679bffb2 --- /dev/null +++ b/test/spec/crud/unified/aggregate-merge-errorResponse.yml @@ -0,0 +1,42 @@ +description: "aggregate-merge-errorResponse" + +schemaVersion: "1.12" + +createEntities: + - client: + id: &client0 client0 + - database: + id: &database0 database0 + client: *client0 + databaseName: &database0Name crud-tests + - collection: + id: &collection0 collection0 + database: *database0 + collectionName: &collection0Name test + +initialData: &initialData + - collectionName: *collection0Name + databaseName: *database0Name + documents: + - 
{ _id: 1, x: 1 } + - { _id: 2, x: 1 } + +tests: + - description: "aggregate $merge DuplicateKey error is accessible" + runOnRequirements: + - minServerVersion: "5.1" # SERVER-59097 + # Exclude sharded topologies since the aggregate command fails with + # IllegalOperation(20) instead of DuplicateKey(11000) + topologies: [ single, replicaset ] + operations: + - name: aggregate + object: *database0 + arguments: + pipeline: + - { $documents: [ { _id: 2, x: 1 } ] } + - { $merge: { into: *collection0Name, whenMatched: "fail" } } + expectError: + errorCode: 11000 # DuplicateKey + errorResponse: + keyPattern: { _id: 1 } + keyValue: { _id: 2 } diff --git a/test/spec/crud/unified/bulkWrite-errorResponse.json b/test/spec/crud/unified/bulkWrite-errorResponse.json new file mode 100644 index 00000000000..157637c713a --- /dev/null +++ b/test/spec/crud/unified/bulkWrite-errorResponse.json @@ -0,0 +1,88 @@ +{ + "description": "bulkWrite-errorResponse", + "schemaVersion": "1.12", + "createEntities": [ + { + "client": { + "id": "client0", + "useMultipleMongoses": false + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "crud-tests" + } + }, + { + "collection": { + "id": "collection0", + "database": "database0", + "collectionName": "test" + } + } + ], + "tests": [ + { + "description": "bulkWrite operations support errorResponse assertions", + "runOnRequirements": [ + { + "minServerVersion": "4.0.0", + "topologies": [ + "single", + "replicaset" + ] + }, + { + "minServerVersion": "4.2.0", + "topologies": [ + "sharded" + ] + } + ], + "operations": [ + { + "name": "failPoint", + "object": "testRunner", + "arguments": { + "client": "client0", + "failPoint": { + "configureFailPoint": "failCommand", + "mode": { + "times": 1 + }, + "data": { + "failCommands": [ + "insert" + ], + "errorCode": 8 + } + } + } + }, + { + "name": "bulkWrite", + "object": "collection0", + "arguments": { + "requests": [ + { + "insertOne": { + "document": { + "_id": 1 + } + } + } + ] + }, + "expectError": { + "errorCode": 8, + "errorResponse": { + "code": 8 + } + } + } + ] + } + ] +} diff --git a/test/spec/crud/unified/bulkWrite-errorResponse.yml b/test/spec/crud/unified/bulkWrite-errorResponse.yml new file mode 100644 index 00000000000..d4f335dfd33 --- /dev/null +++ b/test/spec/crud/unified/bulkWrite-errorResponse.yml @@ -0,0 +1,50 @@ +description: "bulkWrite-errorResponse" + +schemaVersion: "1.12" + +createEntities: + - client: + id: &client0 client0 + useMultipleMongoses: false + - database: + id: &database0 database0 + client: *client0 + databaseName: &database0Name crud-tests + - collection: + id: &collection0 collection0 + database: *database0 + collectionName: &collection0Name test + +tests: + # This test intentionally executes only a single insert operation in the bulk + # write to make the error code and response assertions less ambiguous. That + # said, some drivers may still need to skip this test because the CRUD spec + # does not prescribe how drivers should formulate a BulkWriteException beyond + # collecting write and write concern errors. 
+ - description: "bulkWrite operations support errorResponse assertions" + runOnRequirements: + - minServerVersion: "4.0.0" + topologies: [ single, replicaset ] + - minServerVersion: "4.2.0" + topologies: [ sharded ] + operations: + - name: failPoint + object: testRunner + arguments: + client: *client0 + failPoint: + configureFailPoint: failCommand + mode: { times: 1 } + data: + failCommands: [ insert ] + errorCode: &errorCode 8 # UnknownError + - name: bulkWrite + object: *collection0 + arguments: + requests: + - insertOne: + document: { _id: 1 } + expectError: + errorCode: *errorCode + errorResponse: + code: *errorCode diff --git a/test/spec/crud/unified/deleteOne-errorResponse.json b/test/spec/crud/unified/deleteOne-errorResponse.json new file mode 100644 index 00000000000..1f3a266f1ef --- /dev/null +++ b/test/spec/crud/unified/deleteOne-errorResponse.json @@ -0,0 +1,82 @@ +{ + "description": "deleteOne-errorResponse", + "schemaVersion": "1.12", + "createEntities": [ + { + "client": { + "id": "client0", + "useMultipleMongoses": false + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "crud-tests" + } + }, + { + "collection": { + "id": "collection0", + "database": "database0", + "collectionName": "test" + } + } + ], + "tests": [ + { + "description": "delete operations support errorResponse assertions", + "runOnRequirements": [ + { + "minServerVersion": "4.0.0", + "topologies": [ + "single", + "replicaset" + ] + }, + { + "minServerVersion": "4.2.0", + "topologies": [ + "sharded" + ] + } + ], + "operations": [ + { + "name": "failPoint", + "object": "testRunner", + "arguments": { + "client": "client0", + "failPoint": { + "configureFailPoint": "failCommand", + "mode": { + "times": 1 + }, + "data": { + "failCommands": [ + "delete" + ], + "errorCode": 8 + } + } + } + }, + { + "name": "deleteOne", + "object": "collection0", + "arguments": { + "filter": { + "_id": 1 + } + }, + "expectError": { + "errorCode": 8, + "errorResponse": { + "code": 8 + } + } + } + ] + } + ] +} diff --git a/test/spec/crud/unified/deleteOne-errorResponse.yml b/test/spec/crud/unified/deleteOne-errorResponse.yml new file mode 100644 index 00000000000..dcf013060e9 --- /dev/null +++ b/test/spec/crud/unified/deleteOne-errorResponse.yml @@ -0,0 +1,46 @@ +description: "deleteOne-errorResponse" + +schemaVersion: "1.12" + +createEntities: + - client: + id: &client0 client0 + useMultipleMongoses: false + - database: + id: &database0 database0 + client: *client0 + databaseName: &database0Name crud-tests + - collection: + id: &collection0 collection0 + database: *database0 + collectionName: &collection0Name test + +tests: + # Some drivers may still need to skip this test because the CRUD spec does not + # prescribe how drivers should formulate a WriteException beyond collecting a + # write or write concern error. 
+ - description: "delete operations support errorResponse assertions" + runOnRequirements: + - minServerVersion: "4.0.0" + topologies: [ single, replicaset ] + - minServerVersion: "4.2.0" + topologies: [ sharded ] + operations: + - name: failPoint + object: testRunner + arguments: + client: *client0 + failPoint: + configureFailPoint: failCommand + mode: { times: 1 } + data: + failCommands: [ delete ] + errorCode: &errorCode 8 # UnknownError + - name: deleteOne + object: *collection0 + arguments: + filter: { _id: 1 } + expectError: + errorCode: *errorCode + errorResponse: + code: *errorCode diff --git a/test/spec/crud/unified/findOneAndUpdate-errorResponse.json b/test/spec/crud/unified/findOneAndUpdate-errorResponse.json new file mode 100644 index 00000000000..5023a450f33 --- /dev/null +++ b/test/spec/crud/unified/findOneAndUpdate-errorResponse.json @@ -0,0 +1,132 @@ +{ + "description": "findOneAndUpdate-errorResponse", + "schemaVersion": "1.12", + "createEntities": [ + { + "client": { + "id": "client0" + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "crud-tests" + } + }, + { + "collection": { + "id": "collection0", + "database": "database0", + "collectionName": "test" + } + } + ], + "initialData": [ + { + "collectionName": "test", + "databaseName": "crud-tests", + "documents": [ + { + "_id": 1, + "x": "foo" + } + ] + } + ], + "tests": [ + { + "description": "findOneAndUpdate DuplicateKey error is accessible", + "runOnRequirements": [ + { + "minServerVersion": "4.2" + } + ], + "operations": [ + { + "name": "createIndex", + "object": "collection0", + "arguments": { + "keys": { + "x": 1 + }, + "unique": true + } + }, + { + "name": "findOneAndUpdate", + "object": "collection0", + "arguments": { + "filter": { + "_id": 2 + }, + "update": { + "$set": { + "x": "foo" + } + }, + "upsert": true + }, + "expectError": { + "errorCode": 11000, + "errorResponse": { + "keyPattern": { + "x": 1 + }, + "keyValue": { + "x": "foo" + } + } + } + } + ] + }, + { + "description": "findOneAndUpdate document validation errInfo is accessible", + "runOnRequirements": [ + { + "minServerVersion": "5.0" + } + ], + "operations": [ + { + "name": "modifyCollection", + "object": "database0", + "arguments": { + "collection": "test", + "validator": { + "x": { + "$type": "string" + } + } + } + }, + { + "name": "findOneAndUpdate", + "object": "collection0", + "arguments": { + "filter": { + "_id": 1 + }, + "update": { + "$set": { + "x": 1 + } + } + }, + "expectError": { + "errorCode": 121, + "errorResponse": { + "errInfo": { + "failingDocumentId": 1, + "details": { + "$$type": "object" + } + } + } + } + } + ] + } + ] +} diff --git a/test/spec/crud/unified/findOneAndUpdate-errorResponse.yml b/test/spec/crud/unified/findOneAndUpdate-errorResponse.yml new file mode 100644 index 00000000000..8faed768097 --- /dev/null +++ b/test/spec/crud/unified/findOneAndUpdate-errorResponse.yml @@ -0,0 +1,69 @@ +description: "findOneAndUpdate-errorResponse" + +schemaVersion: "1.12" + +createEntities: + - client: + id: &client0 client0 + - database: + id: &database0 database0 + client: *client0 + databaseName: &database0Name crud-tests + - collection: + id: &collection0 collection0 + database: *database0 + collectionName: &collection0Name test + +initialData: &initialData + - collectionName: *collection0Name + databaseName: *database0Name + documents: + - { _id: 1, x: "foo" } + +tests: + - description: "findOneAndUpdate DuplicateKey error is accessible" + runOnRequirements: + - minServerVersion: "4.2" # 
SERVER-37124 + operations: + - name: createIndex + object: *collection0 + arguments: + keys: { x: 1 } + unique: true + - name: findOneAndUpdate + object: *collection0 + arguments: + filter: { _id: 2 } + update: { $set: { x: "foo" } } + upsert: true + expectError: + errorCode: 11000 # DuplicateKey + errorResponse: + keyPattern: { x: 1 } + keyValue: { x: "foo" } + + - description: "findOneAndUpdate document validation errInfo is accessible" + runOnRequirements: + - minServerVersion: "5.0" + operations: + - name: modifyCollection + object: *database0 + arguments: + collection: *collection0Name + validator: + x: { $type: "string" } + - name: findOneAndUpdate + object: *collection0 + arguments: + filter: { _id: 1 } + update: { $set: { x: 1 } } + expectError: + errorCode: 121 # DocumentValidationFailure + errorResponse: + # Avoid asserting the exact contents of errInfo as it may vary by + # server version. Likewise, this is why drivers do not model the + # document. The following is sufficient to test that validation + # details are accessible. See SERVER-20547 for more context. + errInfo: + failingDocumentId: 1 + details: { $$type: "object" } diff --git a/test/spec/crud/unified/insertOne-errorResponse.json b/test/spec/crud/unified/insertOne-errorResponse.json new file mode 100644 index 00000000000..04ea6a74513 --- /dev/null +++ b/test/spec/crud/unified/insertOne-errorResponse.json @@ -0,0 +1,82 @@ +{ + "description": "insertOne-errorResponse", + "schemaVersion": "1.12", + "createEntities": [ + { + "client": { + "id": "client0", + "useMultipleMongoses": false + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "crud-tests" + } + }, + { + "collection": { + "id": "collection0", + "database": "database0", + "collectionName": "test" + } + } + ], + "tests": [ + { + "description": "insert operations support errorResponse assertions", + "runOnRequirements": [ + { + "minServerVersion": "4.0.0", + "topologies": [ + "single", + "replicaset" + ] + }, + { + "minServerVersion": "4.2.0", + "topologies": [ + "sharded" + ] + } + ], + "operations": [ + { + "name": "failPoint", + "object": "testRunner", + "arguments": { + "client": "client0", + "failPoint": { + "configureFailPoint": "failCommand", + "mode": { + "times": 1 + }, + "data": { + "failCommands": [ + "insert" + ], + "errorCode": 8 + } + } + } + }, + { + "name": "insertOne", + "object": "collection0", + "arguments": { + "document": { + "_id": 1 + } + }, + "expectError": { + "errorCode": 8, + "errorResponse": { + "code": 8 + } + } + } + ] + } + ] +} diff --git a/test/spec/crud/unified/insertOne-errorResponse.yml b/test/spec/crud/unified/insertOne-errorResponse.yml new file mode 100644 index 00000000000..b14caa1737f --- /dev/null +++ b/test/spec/crud/unified/insertOne-errorResponse.yml @@ -0,0 +1,46 @@ +description: "insertOne-errorResponse" + +schemaVersion: "1.12" + +createEntities: + - client: + id: &client0 client0 + useMultipleMongoses: false + - database: + id: &database0 database0 + client: *client0 + databaseName: &database0Name crud-tests + - collection: + id: &collection0 collection0 + database: *database0 + collectionName: &collection0Name test + +tests: + # Some drivers may still need to skip this test because the CRUD spec does not + # prescribe how drivers should formulate a WriteException beyond collecting a + # write or write concern error. 
+ - description: "insert operations support errorResponse assertions" + runOnRequirements: + - minServerVersion: "4.0.0" + topologies: [ single, replicaset ] + - minServerVersion: "4.2.0" + topologies: [ sharded ] + operations: + - name: failPoint + object: testRunner + arguments: + client: *client0 + failPoint: + configureFailPoint: failCommand + mode: { times: 1 } + data: + failCommands: [ insert ] + errorCode: &errorCode 8 # UnknownError + - name: insertOne + object: *collection0 + arguments: + document: { _id: 1 } + expectError: + errorCode: *errorCode + errorResponse: + code: *errorCode diff --git a/test/spec/crud/unified/updateOne-errorResponse.json b/test/spec/crud/unified/updateOne-errorResponse.json new file mode 100644 index 00000000000..0ceddbc4fcd --- /dev/null +++ b/test/spec/crud/unified/updateOne-errorResponse.json @@ -0,0 +1,87 @@ +{ + "description": "updateOne-errorResponse", + "schemaVersion": "1.12", + "createEntities": [ + { + "client": { + "id": "client0", + "useMultipleMongoses": false + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "crud-tests" + } + }, + { + "collection": { + "id": "collection0", + "database": "database0", + "collectionName": "test" + } + } + ], + "tests": [ + { + "description": "update operations support errorResponse assertions", + "runOnRequirements": [ + { + "minServerVersion": "4.0.0", + "topologies": [ + "single", + "replicaset" + ] + }, + { + "minServerVersion": "4.2.0", + "topologies": [ + "sharded" + ] + } + ], + "operations": [ + { + "name": "failPoint", + "object": "testRunner", + "arguments": { + "client": "client0", + "failPoint": { + "configureFailPoint": "failCommand", + "mode": { + "times": 1 + }, + "data": { + "failCommands": [ + "update" + ], + "errorCode": 8 + } + } + } + }, + { + "name": "updateOne", + "object": "collection0", + "arguments": { + "filter": { + "_id": 1 + }, + "update": { + "$set": { + "x": 1 + } + } + }, + "expectError": { + "errorCode": 8, + "errorResponse": { + "code": 8 + } + } + } + ] + } + ] +} diff --git a/test/spec/crud/unified/updateOne-errorResponse.yml b/test/spec/crud/unified/updateOne-errorResponse.yml new file mode 100644 index 00000000000..6d42195b0bc --- /dev/null +++ b/test/spec/crud/unified/updateOne-errorResponse.yml @@ -0,0 +1,47 @@ +description: "updateOne-errorResponse" + +schemaVersion: "1.12" + +createEntities: + - client: + id: &client0 client0 + useMultipleMongoses: false + - database: + id: &database0 database0 + client: *client0 + databaseName: &database0Name crud-tests + - collection: + id: &collection0 collection0 + database: *database0 + collectionName: &collection0Name test + +tests: + # Some drivers may still need to skip this test because the CRUD spec does not + # prescribe how drivers should formulate a WriteException beyond collecting a + # write or write concern error. 
+ - description: "update operations support errorResponse assertions" + runOnRequirements: + - minServerVersion: "4.0.0" + topologies: [ single, replicaset ] + - minServerVersion: "4.2.0" + topologies: [ sharded ] + operations: + - name: failPoint + object: testRunner + arguments: + client: *client0 + failPoint: + configureFailPoint: failCommand + mode: { times: 1 } + data: + failCommands: [ update ] + errorCode: &errorCode 8 # UnknownError + - name: updateOne + object: *collection0 + arguments: + filter: { _id: 1 } + update: { $set: { x: 1 } } + expectError: + errorCode: *errorCode + errorResponse: + code: *errorCode diff --git a/test/spec/unified-test-format/invalid/expectedError-errorResponse-type.json b/test/spec/unified-test-format/invalid/expectedError-errorResponse-type.json new file mode 100644 index 00000000000..6eb66d9b0b5 --- /dev/null +++ b/test/spec/unified-test-format/invalid/expectedError-errorResponse-type.json @@ -0,0 +1,25 @@ +{ + "description": "expectedError-errorResponse-type", + "schemaVersion": "1.12", + "createEntities": [ + { + "client": { + "id": "client0" + } + } + ], + "tests": [ + { + "description": "foo", + "operations": [ + { + "name": "foo", + "object": "client0", + "expectError": { + "errorResponse": 0 + } + } + ] + } + ] +} diff --git a/test/spec/unified-test-format/invalid/expectedError-errorResponse-type.yml b/test/spec/unified-test-format/invalid/expectedError-errorResponse-type.yml new file mode 100644 index 00000000000..e63f6ce8929 --- /dev/null +++ b/test/spec/unified-test-format/invalid/expectedError-errorResponse-type.yml @@ -0,0 +1,15 @@ +description: "expectedError-errorResponse-type" + +schemaVersion: "1.12" + +createEntities: + - client: + id: &client0 "client0" + +tests: + - description: "foo" + operations: + - name: "foo" + object: *client0 + expectError: + errorResponse: 0 diff --git a/test/spec/unified-test-format/valid-pass/expectedError-errorResponse.json b/test/spec/unified-test-format/valid-pass/expectedError-errorResponse.json new file mode 100644 index 00000000000..177b1baf56a --- /dev/null +++ b/test/spec/unified-test-format/valid-pass/expectedError-errorResponse.json @@ -0,0 +1,70 @@ +{ + "description": "expectedError-errorResponse", + "schemaVersion": "1.12", + "createEntities": [ + { + "client": { + "id": "client0" + } + }, + { + "database": { + "id": "database0", + "client": "client0", + "databaseName": "test" + } + }, + { + "collection": { + "id": "collection0", + "database": "database0", + "collectionName": "coll0" + } + } + ], + "tests": [ + { + "description": "Unsupported command", + "operations": [ + { + "name": "runCommand", + "object": "database0", + "arguments": { + "commandName": "unsupportedCommand", + "command": { + "unsupportedCommand": 1 + } + }, + "expectError": { + "errorResponse": { + "errmsg": { + "$$type": "string" + } + } + } + } + ] + }, + { + "description": "Unsupported query operator", + "operations": [ + { + "name": "find", + "object": "collection0", + "arguments": { + "filter": { + "$unsupportedQueryOperator": 1 + } + }, + "expectError": { + "errorResponse": { + "errmsg": { + "$$type": "string" + } + } + } + } + ] + } + ] +} diff --git a/test/spec/unified-test-format/valid-pass/expectedError-errorResponse.yml b/test/spec/unified-test-format/valid-pass/expectedError-errorResponse.yml new file mode 100644 index 00000000000..e10c25a1ed8 --- /dev/null +++ b/test/spec/unified-test-format/valid-pass/expectedError-errorResponse.yml @@ -0,0 +1,39 @@ +description: "expectedError-errorResponse" + +schemaVersion: 
"1.12" + +createEntities: + - client: + id: &client0 client0 + - database: + id: &database0 database0 + client: *client0 + databaseName: &database0Name test + - collection: + id: &collection0 collection0 + database: *database0 + collectionName: &collection0Name coll0 + +tests: + - description: "Unsupported command" + operations: + - name: runCommand + object: *database0 + arguments: + commandName: unsupportedCommand + command: { unsupportedCommand: 1 } + expectError: + # Avoid asserting the exact error since it may vary by server version + errorResponse: + errmsg: { $$type: "string" } + + - description: "Unsupported query operator" + operations: + - name: find + object: *collection0 + arguments: + filter: { $unsupportedQueryOperator: 1 } + expectError: + # Avoid asserting the exact error since it may vary by server version + errorResponse: + errmsg: { $$type: "string" } diff --git a/test/tools/cmap_spec_runner.ts b/test/tools/cmap_spec_runner.ts index 9d0817548f1..a497e6e1923 100644 --- a/test/tools/cmap_spec_runner.ts +++ b/test/tools/cmap_spec_runner.ts @@ -4,6 +4,7 @@ import { clearTimeout, setTimeout } from 'timers'; import { promisify } from 'util'; import { + addContainerMetadata, CMAP_EVENTS, type Connection, ConnectionPool, @@ -369,6 +370,7 @@ async function runCmapTest(test: CmapTest, threadContext: ThreadContext) { } const metadata = makeClientMetadata({ appName: poolOptions.appName, driverInfo: {} }); + const extendedMetadata = addContainerMetadata(metadata); delete poolOptions.appName; const operations = test.operations; @@ -380,7 +382,12 @@ async function runCmapTest(test: CmapTest, threadContext: ThreadContext) { const mainThread = threadContext.getThread(MAIN_THREAD_KEY); mainThread.start(); - threadContext.createPool({ ...poolOptions, metadata, minPoolSizeCheckFrequencyMS }); + threadContext.createPool({ + ...poolOptions, + metadata, + extendedMetadata, + minPoolSizeCheckFrequencyMS + }); // yield control back to the event loop so that the ConnectionPoolCreatedEvent // has a chance to be fired before any synchronously-emitted events from // the queued operations diff --git a/test/tools/unified-spec-runner/entities.ts b/test/tools/unified-spec-runner/entities.ts index 0e5a8f3d5d7..e36afdc5d16 100644 --- a/test/tools/unified-spec-runner/entities.ts +++ b/test/tools/unified-spec-runner/entities.ts @@ -7,6 +7,7 @@ import { ChangeStream, ClientEncryption, ClientSession, + type ClusterTime, Collection, type CommandFailedEvent, type CommandStartedEvent, @@ -556,6 +557,7 @@ export class EntitiesMap extends Map { static async createEntities( config: TestConfiguration, + clusterTime: ClusterTime | null, entities?: EntityDescription[], entityMap?: EntitiesMap ): Promise { @@ -627,6 +629,10 @@ export class EntitiesMap extends Map { } } const session = client.startSession(options); + // Advance the session cluster time. See DRIVERS-2816. 
+ if (clusterTime) { + session.advanceClusterTime(clusterTime); + } map.set(entity.session.id, session); } else if ('bucket' in entity) { const db = map.getEntity('db', entity.bucket.database); diff --git a/test/tools/unified-spec-runner/match.ts b/test/tools/unified-spec-runner/match.ts index 0e9ea113c8d..e12062bf85c 100644 --- a/test/tools/unified-spec-runner/match.ts +++ b/test/tools/unified-spec-runner/match.ts @@ -771,4 +771,8 @@ export function expectErrorCheck( if (expected.expectResult != null) { resultCheck(error, expected.expectResult as any, entities); } + + if (expected.errorResponse != null) { + resultCheck(error, expected.errorResponse, entities); + } } diff --git a/test/tools/unified-spec-runner/operations.ts b/test/tools/unified-spec-runner/operations.ts index 37cc74a0ee0..541ec5a1e45 100644 --- a/test/tools/unified-spec-runner/operations.ts +++ b/test/tools/unified-spec-runner/operations.ts @@ -45,7 +45,7 @@ operations.set('createEntities', async ({ entities, operation, testConfig }) => if (!operation.arguments?.entities) { throw new Error('encountered createEntities operation without entities argument'); } - await EntitiesMap.createEntities(testConfig, operation.arguments.entities!, entities); + await EntitiesMap.createEntities(testConfig, null, operation.arguments.entities!, entities); }); operations.set('abortTransaction', async ({ entities, operation }) => { diff --git a/test/tools/unified-spec-runner/runner.ts b/test/tools/unified-spec-runner/runner.ts index 9f1fb3925f5..1f27dc08115 100644 --- a/test/tools/unified-spec-runner/runner.ts +++ b/test/tools/unified-spec-runner/runner.ts @@ -160,8 +160,15 @@ async function runUnifiedTest( } } + const ping = await utilClient.db().admin().command({ ping: 1 }); + const clusterTime = ping.$clusterTime; + trace('createEntities'); - entities = await EntitiesMap.createEntities(ctx.configuration, unifiedSuite.createEntities); + entities = await EntitiesMap.createEntities( + ctx.configuration, + clusterTime, + unifiedSuite.createEntities + ); // Workaround for SERVER-39704: // test runners MUST execute a non-transactional distinct command on diff --git a/test/tools/unified-spec-runner/schema.ts b/test/tools/unified-spec-runner/schema.ts index 3b3daef8042..353231cb998 100644 --- a/test/tools/unified-spec-runner/schema.ts +++ b/test/tools/unified-spec-runner/schema.ts @@ -368,6 +368,7 @@ export interface ExpectedError { errorLabelsContain?: string[]; errorLabelsOmit?: string[]; expectResult?: unknown; + errorResponse?: Document; } export interface ExpectedLogMessage { diff --git a/test/unit/cmap/connect.test.ts b/test/unit/cmap/connect.test.ts index 7697c124fbe..65ad159b0b1 100644 --- a/test/unit/cmap/connect.test.ts +++ b/test/unit/cmap/connect.test.ts @@ -1,6 +1,7 @@ import { expect } from 'chai'; import { + addContainerMetadata, CancellationToken, type ClientMetadata, connect, @@ -23,6 +24,7 @@ const CONNECT_DEFAULTS = { generation: 1, monitorCommands: false, metadata: {} as ClientMetadata, + extendedMetadata: addContainerMetadata({} as ClientMetadata), loadBalanced: false }; @@ -185,9 +187,164 @@ describe('Connect Tests', function () { expect(error).to.be.instanceOf(MongoNetworkError); }); - context('prepareHandshakeDocument', () => { + describe('prepareHandshakeDocument', () => { + describe('client environment (containers and FAAS)', () => { + const cachedEnv = process.env; + + context('when only kubernetes is present', () => { + let authContext; + + beforeEach(() => { + process.env.KUBERNETES_SERVICE_HOST = 'I exist'; + 
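+          // ('I exist' is arbitrary; getContainerMetadata only checks that the variable is non-empty)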
+          authContext = {
+            connection: {},
+            options: {
+              ...CONNECT_DEFAULTS,
+              extendedMetadata: addContainerMetadata({} as ClientMetadata)
+            }
+          };
+        });
+
+        afterEach(() => {
+          if (cachedEnv.KUBERNETES_SERVICE_HOST != null) {
+            process.env.KUBERNETES_SERVICE_HOST = cachedEnv.KUBERNETES_SERVICE_HOST;
+          } else {
+            delete process.env.KUBERNETES_SERVICE_HOST;
+          }
+          authContext = {};
+        });
+
+        it(`should include { orchestrator: 'kubernetes' } in client.env.container`, async () => {
+          const handshakeDocument = await prepareHandshakeDocument(authContext);
+          expect(handshakeDocument.client.env.container.orchestrator).to.equal('kubernetes');
+        });
+
+        it(`should not have 'name' property in client.env`, async () => {
+          const handshakeDocument = await prepareHandshakeDocument(authContext);
+          expect(handshakeDocument.client.env).to.not.have.property('name');
+        });
+
+        context('when 512 byte size limit is exceeded', () => {
+          it(`should not have 'env' property in client`, async () => {
+            // make metadata = 507 bytes, so it takes up entire LimitedSizeDocument
+            const longAppName = 's'.repeat(493);
+            const longAuthContext = {
+              connection: {},
+              options: {
+                ...CONNECT_DEFAULTS,
+                extendedMetadata: addContainerMetadata({ appName: longAppName })
+              }
+            };
+            const handshakeDocument = await prepareHandshakeDocument(longAuthContext);
+            expect(handshakeDocument.client).to.not.have.property('env');
+          });
+        });
+      });
+
+      context('when kubernetes and FAAS are both present', () => {
+        let authContext;
+
+        beforeEach(() => {
+          process.env.KUBERNETES_SERVICE_HOST = 'I exist';
+          authContext = {
+            connection: {},
+            options: {
+              ...CONNECT_DEFAULTS,
+              extendedMetadata: addContainerMetadata({ env: { name: 'aws.lambda' } })
+            }
+          };
+        });
+
+        afterEach(() => {
+          if (cachedEnv.KUBERNETES_SERVICE_HOST != null) {
+            process.env.KUBERNETES_SERVICE_HOST = cachedEnv.KUBERNETES_SERVICE_HOST;
+          } else {
+            delete process.env.KUBERNETES_SERVICE_HOST;
+          }
+          authContext = {};
+        });
+
+        it(`should include { orchestrator: 'kubernetes' } in client.env.container`, async () => {
+          const handshakeDocument = await prepareHandshakeDocument(authContext);
+          expect(handshakeDocument.client.env.container.orchestrator).to.equal('kubernetes');
+        });
+
+        it(`should still have properly set 'name' property in client.env`, async () => {
+          const handshakeDocument = await prepareHandshakeDocument(authContext);
+          expect(handshakeDocument.client.env.name).to.equal('aws.lambda');
+        });
+
+        context('when 512 byte size limit is exceeded', () => {
+          it(`should not have 'container' property in client.env`, async () => {
+            // make metadata = 507 bytes, so it takes up entire LimitedSizeDocument
+            const longAppName = 's'.repeat(447);
+            const longAuthContext = {
+              connection: {},
+              options: {
+                ...CONNECT_DEFAULTS,
+                extendedMetadata: {
+                  appName: longAppName,
+                  env: { name: 'aws.lambda' }
+                } as unknown as Promise
+              }
+            };
+            const handshakeDocument = await prepareHandshakeDocument(longAuthContext);
+            expect(handshakeDocument.client.env.name).to.equal('aws.lambda');
+            expect(handshakeDocument.client.env).to.not.have.property('container');
+          });
+        });
+      });
+
+      context('when neither container nor FAAS env is present (empty string case)', () => {
+        const authContext = {
+          connection: {},
+          options: { ...CONNECT_DEFAULTS }
+        };
+
+        context('when process.env.KUBERNETES_SERVICE_HOST = undefined', () => {
+          beforeEach(() => {
+            delete process.env.KUBERNETES_SERVICE_HOST;
+          });
+
+          afterEach(() => {
+            if (cachedEnv.KUBERNETES_SERVICE_HOST != null) {
+              process.env.KUBERNETES_SERVICE_HOST = cachedEnv.KUBERNETES_SERVICE_HOST;
+            } else {
+              delete process.env.KUBERNETES_SERVICE_HOST;
+            }
+          });
+
+          it(`should not have 'env' property in client`, async () => {
+            const handshakeDocument = await prepareHandshakeDocument(authContext);
+            expect(handshakeDocument.client).to.not.have.property('env');
+          });
+        });
+
+        context('when process.env.KUBERNETES_SERVICE_HOST is an empty string', () => {
+          beforeEach(() => {
+            process.env.KUBERNETES_SERVICE_HOST = '';
+          });
+
+          afterEach(() => {
+            if (cachedEnv.KUBERNETES_SERVICE_HOST != null) {
+              process.env.KUBERNETES_SERVICE_HOST = cachedEnv.KUBERNETES_SERVICE_HOST;
+            } else {
+              delete process.env.KUBERNETES_SERVICE_HOST;
+            }
+          });
+
+          it(`should not have 'env' property in client`, async () => {
+            const handshakeDocument = await prepareHandshakeDocument(authContext);
+            expect(handshakeDocument.client).to.not.have.property('env');
+          });
+        });
+      });
+    });
+
     context('when serverApi.version is present', () => {
-      const options = {};
+      const options = { ...CONNECT_DEFAULTS };
       const authContext = {
         connection: { serverApi: { version: '1' } },
         options
@@ -200,7 +357,7 @@
     });
 
     context('when serverApi is not present', () => {
-      const options = {};
+      const options = { ...CONNECT_DEFAULTS };
       const authContext = {
         connection: {},
         options
@@ -216,7 +373,7 @@
     context('when loadBalanced is not set as an option', () => {
       const authContext = {
         connection: {},
-        options: {}
+        options: { ...CONNECT_DEFAULTS }
       };
 
       it('does not set loadBalanced on the handshake document', async () => {
@@ -238,7 +395,7 @@
     context('when loadBalanced is set to false', () => {
       const authContext = {
         connection: {},
-        options: { loadBalanced: false }
+        options: { ...CONNECT_DEFAULTS, loadBalanced: false }
       };
 
       it('does not set loadBalanced on the handshake document', async () => {
@@ -260,7 +417,7 @@
     context('when loadBalanced is set to true', () => {
       const authContext = {
         connection: {},
-        options: { loadBalanced: true }
+        options: { ...CONNECT_DEFAULTS, loadBalanced: true }
       };
 
       it('sets loadBalanced on the handshake document', async () => {
diff --git a/test/unit/cmap/connection_pool.test.js b/test/unit/cmap/connection_pool.test.js
index b6e408e3d56..a9ea375c7f5 100644
--- a/test/unit/cmap/connection_pool.test.js
+++ b/test/unit/cmap/connection_pool.test.js
@@ -22,6 +22,9 @@ describe('Connection Pool', function () {
       },
       s: {
         authProviders: new MongoClientAuthProviders()
+      },
+      options: {
+        extendedMetadata: {}
       }
     }
   }
diff --git a/test/unit/connection_string.test.ts b/test/unit/connection_string.test.ts
index 30ba650d6d2..00069ced53a 100644
--- a/test/unit/connection_string.test.ts
+++ b/test/unit/connection_string.test.ts
@@ -896,7 +896,8 @@ describe('Connection String', function () {
       });
       const log: Log = { t: new Date(), c: 'ConnectionStringStdErr', s: 'error' };
       client.options.mongoLoggerOptions.logDestination.write(log);
-      expect(stderrStub.write).calledWith(inspect(log, { breakLength: Infinity, compact: true }));
+      const logLine = inspect(log, { breakLength: Infinity, compact: true });
+      expect(stderrStub.write).calledWith(`${logLine}\n`);
     });
   });
 
@@ -907,7 +908,8 @@ describe('Connection String', function () {
       });
       const log: Log = { t: new Date(), c: 'ConnectionStringStdOut', s: 'error' };
       client.options.mongoLoggerOptions.logDestination.write(log);
-      expect(stdoutStub.write).calledWith(inspect(log, { breakLength: Infinity, compact: true }));
+      const logLine = inspect(log, { breakLength: Infinity, compact: true });
+      expect(stdoutStub.write).calledWith(`${logLine}\n`);
     });
   });
diff --git a/test/unit/error.test.ts b/test/unit/error.test.ts
index 678398d27ee..4a9d1423a5b 100644
--- a/test/unit/error.test.ts
+++ b/test/unit/error.test.ts
@@ -113,6 +113,24 @@ describe('MongoErrors', () => {
     expect(err.message).to.equal(errorMessage);
     expect(err.someData).to.equal(12345);
   });
+  context('errorResponse property', function () {
+    it(`should set errorResponse to raw results document passed in`, function () {
+      const errorDoc = { message: 'A test error', someData: 12345 };
+      const err = new MongoServerError(errorDoc);
+      expect(err).to.be.an.instanceof(Error);
+      expect(err.errorResponse).to.deep.equal(errorDoc);
+    });
+    it(`should not construct enumerated key 'errorResponse' if present`, function () {
+      const errorDoc = {
+        message: 'A test error',
+        errorResponse: 'I will not be an enumerated key'
+      };
+      const err = new MongoServerError(errorDoc);
+      expect(err).to.be.an.instanceof(Error);
+      expect(err.errorResponse).to.deep.equal(errorDoc);
+      expect(err.errorResponse?.errorResponse).to.deep.equal('I will not be an enumerated key');
+    });
+  });
 });
 
 describe('MongoNetworkError#constructor', () => {
diff --git a/test/unit/mongo_client.test.js b/test/unit/mongo_client.test.js
index 3cd8ef52988..2fb5121b1e6 100644
--- a/test/unit/mongo_client.test.js
+++ b/test/unit/mongo_client.test.js
@@ -852,9 +852,8 @@ describe('MongoClient', function () {
       });
       const log = { t: new Date(), c: 'constructorStdErr', s: 'error' };
       client.options.mongoLoggerOptions.logDestination.write(log);
-      expect(stderrStub.write).calledWith(
-        inspect(log, { breakLength: Infinity, compact: true })
-      );
+      const logLine = inspect(log, { breakLength: Infinity, compact: true });
+      expect(stderrStub.write).calledWith(`${logLine}\n`);
     });
   });
 
@@ -882,9 +881,8 @@ describe('MongoClient', function () {
       });
       const log = { t: new Date(), c: 'constructorStdOut', s: 'error' };
       client.options.mongoLoggerOptions.logDestination.write(log);
-      expect(stdoutStub.write).calledWith(
-        inspect(log, { breakLength: Infinity, compact: true })
-      );
+      const logLine = inspect(log, { breakLength: Infinity, compact: true });
+      expect(stdoutStub.write).calledWith(`${logLine}\n`);
     });
   });
 
@@ -939,9 +937,8 @@ describe('MongoClient', function () {
       });
       const log = { t: new Date(), c: 'constructorStdErr', s: 'error' };
       client.options.mongoLoggerOptions.logDestination.write(log);
-      expect(stderrStub.write).calledWith(
-        inspect(log, { breakLength: Infinity, compact: true })
-      );
+      const logLine = inspect(log, { breakLength: Infinity, compact: true });
+      expect(stderrStub.write).calledWith(`${logLine}\n`);
     });
   });
 });
diff --git a/test/unit/mongo_logger.test.ts b/test/unit/mongo_logger.test.ts
index 2e9cecf66d6..454e7b1cc79 100644
--- a/test/unit/mongo_logger.test.ts
+++ b/test/unit/mongo_logger.test.ts
@@ -443,9 +443,8 @@ describe('class MongoLogger', async function () {
             const log: Log = { t: new Date(), c: 'command', s: 'error' };
             options.logDestination.write(log);
 
-            expect(stderrStub.write).to.have.been.calledOnceWith(
-              inspect(log, { breakLength: Infinity, compact: true })
-            );
+            const logLine = inspect(log, { breakLength: Infinity, compact: true });
+            expect(stderrStub.write).to.have.been.calledOnceWith(`${logLine}\n`);
           });
         }
       }
@@ -465,9 +464,8 @@ describe('class MongoLogger', async function () {
             const log: Log = { t: new Date(), c: 'command', s: 'error' };
             options.logDestination.write(log);
 
-            expect(stderrStub.write).to.have.been.calledOnceWith(
-              inspect(log, { breakLength: Infinity, compact: true })
-            );
+            const logLine = inspect(log, { breakLength: Infinity, compact: true });
+            expect(stderrStub.write).to.have.been.calledOnceWith(`${logLine}\n`);
           });
         }
       }
@@ -512,9 +510,8 @@ describe('class MongoLogger', async function () {
             const log: Log = { t: new Date(), c: 'command', s: 'error' };
             options.logDestination.write(log);
 
-            expect(stderrStub.write).to.have.been.calledOnceWith(
-              inspect(log, { breakLength: Infinity, compact: true })
-            );
+            const logLine = inspect(log, { breakLength: Infinity, compact: true });
+            expect(stderrStub.write).to.have.been.calledOnceWith(`${logLine}\n`);
           });
         }
       }
@@ -536,9 +533,8 @@ describe('class MongoLogger', async function () {
             const log: Log = { t: new Date(), c: 'command', s: 'error' };
             options.logDestination.write(log);
 
-            expect(stderrStub.write).to.have.been.calledOnceWith(
-              inspect(log, { breakLength: Infinity, compact: true })
-            );
+            const logLine = inspect(log, { breakLength: Infinity, compact: true });
+            expect(stderrStub.write).to.have.been.calledOnceWith(`${logLine}\n`);
           });
         }
       }
@@ -1399,9 +1395,8 @@ describe('class MongoLogger', async function () {
       logger.debug('client', 'random message');
       let stderrStubCall = stderrStub.write.getCall(0).args[0];
       stderrStubCall = stderrStubCall.slice(stderrStubCall.search('c:'));
-      expect(stderrStubCall).to.equal(
-        `c: 'client', s: 'error', message: 'User input for mongodbLogPath is now invalid. Logging is halted.', error: 'This writable always throws' }`
-      );
+      const expectedLogLine1 = `c: 'client', s: 'error', message: 'User input for mongodbLogPath is now invalid. Logging is halted.', error: 'This writable always throws' }`;
+      expect(stderrStubCall).to.equal(`${expectedLogLine1}\n`);
 
       // logging is halted
       logger.debug('client', 'random message 2');
@@ -1450,9 +1445,8 @@ describe('class MongoLogger', async function () {
       // stderr now contains the error message
       let stderrStubCall = stderrStub.write.getCall(0).args[0];
       stderrStubCall = stderrStubCall.slice(stderrStubCall.search('c:'));
-      expect(stderrStubCall).to.equal(
-        `c: 'client', s: 'error', message: 'User input for mongodbLogPath is now invalid. Logging is halted.', error: 'This writable always throws, but only after at least 500ms' }`
-      );
+      const expectedLogLine1 = `c: 'client', s: 'error', message: 'User input for mongodbLogPath is now invalid. Logging is halted.', error: 'This writable always throws, but only after at least 500ms' }`;
+      expect(stderrStubCall).to.equal(`${expectedLogLine1}\n`);
 
       // no more logging in the future
       logger.debug('client', 'random message 2');
@@ -1480,7 +1474,7 @@ describe('class MongoLogger', async function () {
       let stderrStubCall = stderrStub.write.getCall(0).args[0];
       stderrStubCall = stderrStubCall.slice(stderrStubCall.search('c:'));
       expect(stderrStubCall).to.equal(
-        `c: 'client', s: 'error', message: 'User input for mongodbLogPath is now invalid. Logging is halted.', error: 'I am stdout and do not work' }`
+        `c: 'client', s: 'error', message: 'User input for mongodbLogPath is now invalid. Logging is halted.', error: 'I am stdout and do not work' }\n`
      );
 
      // logging is halted
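
A minimal, hypothetical consumer-side sketch of the errorResponse behavior exercised by the error.test.ts and unified-test changes above; it is not part of the patch, and the URI, database name, and the deliberately unsupported command are placeholders:

import { MongoClient, MongoServerError } from 'mongodb';

// Sketch only: MongoServerError.errorResponse preserves the server's raw reply,
// so fields such as errmsg stay available alongside the driver's own properties.
async function logServerErrorResponse(uri: string): Promise<void> {
  const client = new MongoClient(uri);
  try {
    await client.db('test').command({ unsupportedCommand: 1 });
  } catch (error) {
    if (error instanceof MongoServerError) {
      // The unified tests above only assert that errmsg is a string.
      console.log(error.errorResponse.errmsg);
    }
  } finally {
    await client.close();
  }
}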