diff --git a/.evergreen/run-serverless-tests.sh b/.evergreen/run-serverless-tests.sh
index dbe8db4901..298bfa51f5 100644
--- a/.evergreen/run-serverless-tests.sh
+++ b/.evergreen/run-serverless-tests.sh
@@ -11,7 +11,8 @@ if [ -z ${MONGODB_URI+omitted} ]; then echo "MONGODB_URI is unset" && exit 1; fi
 if [ -z ${SERVERLESS_ATLAS_USER+omitted} ]; then echo "SERVERLESS_ATLAS_USER is unset" && exit 1; fi
 if [ -z ${SERVERLESS_ATLAS_PASSWORD+omitted} ]; then echo "SERVERLESS_ATLAS_PASSWORD is unset" && exit 1; fi
 
-npx mocha --file test/tools/runner/index.js \
+npx mocha \
+  --config test/mocha_mongodb.json \
   test/integration/crud/crud.spec.test.js \
   test/integration/crud/crud.prose.test.js \
   test/integration/retryable-reads/retryable_reads.spec.test.js \
diff --git a/.mocharc.json b/.mocharc.json
index f221881fc4..903d57a09b 100644
--- a/.mocharc.json
+++ b/.mocharc.json
@@ -1,14 +1,11 @@
 {
   "$schema": "https://raw.githubusercontent.com/SchemaStore/schemastore/master/src/schemas/json/mocharc.json",
-  "extension": [
-    "js",
-    "ts"
-  ],
   "require": [
     "source-map-support/register",
     "ts-node/register",
-    "test/tools/runner/chai-addons"
+    "test/tools/runner/chai-addons.js"
   ],
+  "extension": ["js", "ts"],
   "ui": "test/tools/runner/metadata_ui.js",
   "recursive": true,
   "timeout": 60000,
diff --git a/global.d.ts b/global.d.ts
index a00ba5fb4a..59346ae604 100644
--- a/global.d.ts
+++ b/global.d.ts
@@ -1,4 +1,4 @@
-import type { TestConfiguration } from './test/tools/unified-spec-runner/runner';
+import type { TestConfiguration } from './test/tools/runner/config';
 
 type WithExclusion<T extends string> = `!${T}`;
 /** Defined in test/tools/runner/filters/mongodb_topology_filter.js (topologyTypeToString) */
diff --git a/package.json b/package.json
index 67c8d29094..41bff208ab 100644
--- a/package.json
+++ b/package.json
@@ -95,7 +95,7 @@
   },
   "scripts": {
     "build:evergreen": "node .evergreen/generate_evergreen_tasks.js",
-    "build:ts": "rimraf lib && ./node_modules/typescript/bin/tsc",
+    "build:ts": "rimraf lib && node ./node_modules/typescript/bin/tsc",
     "build:dts": "npm run build:ts && api-extractor run && rimraf 'lib/**/*.d.ts*' && downlevel-dts mongodb.d.ts mongodb.ts34.d.ts",
     "build:docs": "typedoc",
     "check:bench": "node test/benchmarks/driverBench",
@@ -104,19 +104,19 @@
     "check:lint": "npm run build:dts && npm run check:dts && npm run check:eslint && npm run check:tsd",
     "check:eslint": "eslint -v && eslint --max-warnings=0 --ext '.js,.ts' src test",
     "check:tsd": "tsd --version && tsd",
-    "check:dts": "./node_modules/typescript/bin/tsc --noEmit mongodb.d.ts && tsd",
-    "check:test": "mocha --file test/tools/runner test/integration",
-    "check:unit": "mocha test/unit/",
-    "check:ts": "./node_modules/typescript/bin/tsc -v && ./node_modules/typescript/bin/tsc --noEmit",
-    "check:atlas": "mocha --config \"test/manual/mocharc.json\" test/manual/atlas_connectivity.test.js",
-    "check:adl": "mocha --file test/tools/runner test/manual/atlas-data-lake-testing",
-    "check:aws": "mocha --file test/tools/runner test/integration/auth/mongodb_aws.test.js",
-    "check:ocsp": "mocha --config \"test/manual/mocharc.json\" test/manual/ocsp_support.test.js",
-    "check:kerberos": "mocha --config \"test/manual/mocharc.json\" test/manual/kerberos.test.js",
-    "check:tls": "mocha --config \"test/manual/mocharc.json\" test/manual/tls_support.test.js",
-    "check:ldap": "mocha --config \"test/manual/mocharc.json\" test/manual/ldap.test.js",
-    "check:socks5": "mocha --config \"test/manual/mocharc.json\" test/manual/socks5.test.ts",
-    "check:csfle": "mocha --file test/tools/runner test/integration/client-side-encryption",
+    "check:dts": "node ./node_modules/typescript/bin/tsc --noEmit mongodb.d.ts && tsd",
+    "check:test": "mocha --config test/mocha_mongodb.json test/integration",
+    "check:unit": "mocha test/unit",
+    "check:ts": "node ./node_modules/typescript/bin/tsc -v && node ./node_modules/typescript/bin/tsc --noEmit",
+    "check:atlas": "mocha --config test/manual/mocharc.json test/manual/atlas_connectivity.test.js",
+    "check:adl": "mocha --config test/mocha_mongodb.json test/manual/atlas-data-lake-testing",
+    "check:aws": "mocha --config test/mocha_mongodb.json test/integration/auth/mongodb_aws.test.js",
+    "check:ocsp": "mocha --config test/manual/mocharc.json test/manual/ocsp_support.test.js",
+    "check:kerberos": "mocha --config test/manual/mocharc.json test/manual/kerberos.test.js",
+    "check:tls": "mocha --config test/manual/mocharc.json test/manual/tls_support.test.js",
+    "check:ldap": "mocha --config test/manual/mocharc.json test/manual/ldap.test.js",
+    "check:socks5": "mocha --config test/manual/mocharc.json test/manual/socks5.test.ts",
+    "check:csfle": "mocha --config test/mocha_mongodb.json test/integration/client-side-encryption",
     "check:snappy": "mocha test/unit/assorted/snappy.test.js",
     "prepare": "node etc/prepare.js",
     "release": "standard-version -i HISTORY.md",
diff --git a/test/integration/retryable-writes/retryable_writes.spec.test.js b/test/integration/retryable-writes/retryable_writes.spec.test.js
index 5cb797b0b9..acac05c8a2 100644
--- a/test/integration/retryable-writes/retryable_writes.spec.test.js
+++ b/test/integration/retryable-writes/retryable_writes.spec.test.js
@@ -2,47 +2,64 @@
 const { expect } = require('chai');
 const { loadSpecTests } = require('../../spec');
-const { parseRunOn } = require('../../tools/spec-runner');
+const { legacyRunOnToRunOnRequirement } = require('../../tools/spec-runner');
+const { topologySatisfies } = require('../../tools/unified-spec-runner/unified-utils');
 
 describe('Retryable Writes', function () {
   let ctx = {};
 
-  loadSpecTests('retryable-writes').forEach(suite => {
-    const environmentRequirementList = parseRunOn(suite.runOn);
-    environmentRequirementList.forEach(requires => {
-      const suiteName = `${suite.name} - ${requires.topology.join()}`;
-
-      describe(suiteName, {
-        metadata: { requires },
-        test: function () {
-          // Step 3: Test Teardown. Turn off failpoints, and close client
-          afterEach(function () {
-            if (!ctx.db || !ctx.client) {
-              return;
-            }
+  const retryableWrites = loadSpecTests('retryable-writes');
+
+  for (const suite of retryableWrites) {
+    describe(suite.name, function () {
+      beforeEach(async function () {
+        let utilClient;
+        if (this.configuration.isLoadBalanced) {
+          // The util client can always point at the single mongos LB frontend.
+          utilClient = this.configuration.newClient(this.configuration.singleMongosLoadBalancerUri);
+        } else {
+          utilClient = this.configuration.newClient();
+        }
 
-            return Promise.resolve()
-              .then(() =>
-                ctx.failPointName ? turnOffFailPoint(ctx.client, ctx.failPointName) : {}
-              )
-              .then(() => ctx.client.close())
-              .then(() => (ctx = {}));
-          });
+        await utilClient.connect();
 
-          suite.tests.forEach(test => {
-            it(test.description, function () {
-              // Step 1: Test Setup. Includes a lot of boilerplate stuff
-              // like creating a client, dropping and refilling data collections,
-              // and enabling failpoints
-              return executeScenarioSetup(suite, test, this.configuration, ctx).then(() =>
-                // Step 2: Run the test
-                executeScenarioTest(test, ctx)
-              );
-            });
-          });
+        const allRequirements = suite.runOn.map(legacyRunOnToRunOnRequirement);
+
+        let shouldRun = true;
+        for (const requirement of allRequirements) {
+          shouldRun =
+            shouldRun && (await topologySatisfies(this.currentTest.ctx, requirement, utilClient));
+        }
+
+        await utilClient.close();
+
+        if (!shouldRun) this.skip();
+      });
+
+      afterEach(async function () {
+        // Step 3: Test Teardown. Turn off failpoints, and close client
+        if (!ctx.db || !ctx.client) {
+          return;
+        }
+
+        if (ctx.failPointName) {
+          await turnOffFailPoint(ctx.client, ctx.failPointName);
         }
+        await ctx.client.close();
+        ctx = {}; // reset context
       });
+
+      for (const test of suite.tests) {
+        it(test.description, async function () {
+          // Step 1: Test Setup. Includes a lot of boilerplate stuff
+          // like creating a client, dropping and refilling data collections,
+          // and enabling failpoints
+          await executeScenarioSetup(suite, test, this.configuration, ctx);
+          // Step 2: Run the test
+          await executeScenarioTest(test, ctx);
+        });
+      }
     });
-  });
+  }
 });
 
 function executeScenarioSetup(scenario, test, config, ctx) {
diff --git a/test/integration/server-discovery-and-monitoring/server_discovery_and_monitoring.spec.test.js b/test/integration/server-discovery-and-monitoring/server_discovery_and_monitoring.spec.test.js
index 4f05a4603b..fad3df98af 100644
--- a/test/integration/server-discovery-and-monitoring/server_discovery_and_monitoring.spec.test.js
+++ b/test/integration/server-discovery-and-monitoring/server_discovery_and_monitoring.spec.test.js
@@ -39,12 +39,23 @@ class SDAMRunnerContext extends TestRunnerContext {
 }
 
 describe('SDAM', function () {
-  context('integration spec tests', function () {
+  describe('integration spec tests', function () {
     const testContext = new SDAMRunnerContext();
     const testSuites = loadSpecTests('server-discovery-and-monitoring/integration');
-    after(() => testContext.teardown());
-    before(function () {
-      return testContext.setup(this.configuration);
+
+    beforeEach(async function () {
+      if (this.configuration.isLoadBalanced) {
+        this.currentTest.skipReason = 'Cannot run in a loadBalanced environment';
+        this.skip();
+      }
+    });
+
+    beforeEach(async function () {
+      await testContext.setup(this.configuration);
+    });
+
+    afterEach(async () => {
+      await testContext.teardown();
     });
 
     generateTopologyTests(testSuites, testContext);
diff --git a/test/manual/atlas-data-lake-testing/atlas_data_lake_testing.spec.js b/test/manual/atlas-data-lake-testing/atlas_data_lake_testing.spec.js
index f374213399..9ef22def8f 100644
--- a/test/manual/atlas-data-lake-testing/atlas_data_lake_testing.spec.js
+++ b/test/manual/atlas-data-lake-testing/atlas_data_lake_testing.spec.js
@@ -24,5 +24,7 @@ describe('Atlas Data Lake - spec', function () {
     return testContext.setup(this.configuration);
   });
 
+  for (const suite of testSuites) suite.runOn = []; // patched in for the spec runner
+
   generateTopologyTests(testSuites, testContext);
 });
diff --git a/test/mocha_mongodb.json b/test/mocha_mongodb.json
new file mode 100644
index 0000000000..8129f59d44
--- /dev/null
+++ b/test/mocha_mongodb.json
@@ -0,0 +1,19 @@
+{
+  "$schema": "https://raw.githubusercontent.com/SchemaStore/schemastore/master/src/schemas/json/mocharc.json",
+  "require": [
"source-map-support/register", + "ts-node/register", + "test/tools/runner/chai-addons.js", + "test/tools/runner/hooks/configuration.js", + "test/tools/runner/hooks/client_leak_checker.js", + "test/tools/runner/hooks/session_leak_checker.js" + ], + "extension": ["js", "ts"], + "ui": "test/tools/runner/metadata_ui.js", + "recursive": true, + "timeout": 60000, + "failZero": true, + "reporter": "test/tools/reporter/mongodb_reporter.js", + "sort": true, + "color": true +} diff --git a/test/tools/runner/config.js b/test/tools/runner/config.ts similarity index 75% rename from test/tools/runner/config.js rename to test/tools/runner/config.ts index 2ae0bece2a..9818299707 100644 --- a/test/tools/runner/config.js +++ b/test/tools/runner/config.ts @@ -1,32 +1,45 @@ -'use strict'; -const ConnectionString = require('mongodb-connection-string-url').default; -const url = require('url'); -const qs = require('querystring'); -const { expect } = require('chai'); - -const { MongoClient } = require('../../../src/mongo_client'); -const { Topology } = require('../../../src/sdam/topology'); -const { TopologyType } = require('../../../src/sdam/common'); -const { HostAddress } = require('../../../src/utils'); -const { getEnvironmentalOptions } = require('../utils'); - -/** - * @typedef {Object} UrlOptions - * @property {string} [db] - dbName to put in the path section override - * @property {string} [replicaSet] - replicaSet name override - * @property {string} [username] - Username for auth section - * @property {string} [password] - Password for auth section - * @property {string} [authMechanism] - Authmechanism name - * @property {Record} [authMechanismProperties] - additional options for auth mechanism - * @property {string} [authSource] - authSource override in searchParams of URI - * @property {boolean} [useMultipleMongoses] - if set will use concatenate all known HostAddresses in URI - */ - -/** - * @param {Record} obj - */ -function convertToConnStringMap(obj) { - let result = []; +import { expect } from 'chai'; +import ConnectionString from 'mongodb-connection-string-url'; +import * as qs from 'querystring'; +import * as url from 'url'; + +import { AuthMechanism } from '../../../src'; +import { MongoClient } from '../../../src/mongo_client'; +import { TopologyType } from '../../../src/sdam/common'; +import { Topology } from '../../../src/sdam/topology'; +import { HostAddress } from '../../../src/utils'; +import { getEnvironmentalOptions } from '../utils'; + +interface ProxyParams { + proxyHost?: string; + proxyPort?: number; + proxyUsername?: string; + proxyPassword?: string; +} + +interface UrlOptions { + /** name of the default db */ + db?: string; + /** replSet name */ + replicaSet?: string; + /** Username to authenticate with */ + username?: string; + /** Password to authenticate with */ + password?: string; + /** Name of the auth mechanism to use */ + authMechanism?: AuthMechanism; + /** Additional properties used by the mechanism */ + authMechanismProperties?: Record; + /** The database to specify as the authentication source */ + authSource?: string; + /** If set will use concatenate all known HostAddresses in URI */ + useMultipleMongoses?: boolean; + /** Parameters for configuring a proxy connection */ + proxyURIParams?: ProxyParams; +} + +function convertToConnStringMap(obj: Record) { + const result = []; Object.keys(obj).forEach(key => { result.push(`${key}:${obj[key]}`); }); @@ -34,8 +47,30 @@ function convertToConnStringMap(obj) { return result.join(','); } -class TestConfiguration { - 
constructor(uri, context) { +export class TestConfiguration { + version: string; + clientSideEncryption: Record; + parameters: Record; + singleMongosLoadBalancerUri: string; + multiMongosLoadBalancerUri: string; + isServerless: boolean; + topologyType: TopologyType; + buildInfo: Record; + options: { + hosts?: string[]; + hostAddresses?: HostAddress[]; + hostAddress?: HostAddress; + host?: string; + port?: number; + db?: string; + replicaSet?: string; + authMechanism?: string; + authMechanismProperties?: Record; + auth?: { username: string; password: string; authSource?: string }; + proxyURIParams?: ProxyParams; + }; + + constructor(uri: string, context: Record) { const url = new ConnectionString(uri); const { hosts } = url; const hostAddresses = hosts.map(HostAddress.fromString); @@ -46,6 +81,7 @@ class TestConfiguration { this.multiMongosLoadBalancerUri = context.multiMongosLoadBalancerUri; this.isServerless = !!process.env.SERVERLESS; this.topologyType = this.isLoadBalanced ? TopologyType.LoadBalanced : context.topologyType; + this.buildInfo = context.buildInfo; this.options = { hosts, hostAddresses, @@ -57,7 +93,7 @@ class TestConfiguration { proxyURIParams: url.searchParams.get('proxyHost') ? { proxyHost: url.searchParams.get('proxyHost'), - proxyPort: url.searchParams.get('proxyPort'), + proxyPort: Number(url.searchParams.get('proxyPort')), proxyUsername: url.searchParams.get('proxyUsername'), proxyPassword: url.searchParams.get('proxyPassword') } @@ -108,15 +144,7 @@ class TestConfiguration { return this.options.replicaSet; } - get mongo() { - throw new TypeError('fix this!'); - } - - get require() { - throw new TypeError('fix this!'); - } - - newClient(dbOptions, serverOptions) { + newClient(dbOptions?: string | Record, serverOptions?: Record) { serverOptions = Object.assign( { minHeartbeatFrequencyMS: 100 }, getEnvironmentalOptions(), @@ -170,7 +198,7 @@ class TestConfiguration { dbOptions.loadBalanced = true; } - const urlOptions = { + const urlOptions: url.UrlObject = { protocol: 'mongodb', slashes: true, hostname: dbHost, @@ -191,6 +219,10 @@ class TestConfiguration { if (username) { urlOptions.auth = `${encodeURIComponent(username)}:${encodeURIComponent(password)}`; } + } + + if (typeof urlOptions.query === 'object') { + // Auth goes at the top of the uri, not in the searchParams delete urlOptions.query.auth; } @@ -219,9 +251,9 @@ class TestConfiguration { * Construct a connection URL using nodejs's whatwg URL similar to how connection_string.ts * works * - * @param {UrlOptions} [options] - overrides and settings for URI generation + * @param options - overrides and settings for URI generation */ - url(options) { + url(options?: UrlOptions) { options = { db: this.options.db, replicaSet: this.options.replicaSet, @@ -251,7 +283,7 @@ class TestConfiguration { if (options.password) url.password = options.password; if (this.isLoadBalanced) { - url.searchParams.append('loadBalanced', true); + url.searchParams.append('loadBalanced', 'true'); } if (options.username || options.password) { @@ -270,7 +302,7 @@ class TestConfiguration { url.searchParams.append('authSource', options.authSource); } } else if (this.isServerless) { - url.searchParams.append('ssl', true); + url.searchParams.append('ssl', 'true'); url.searchParams.append('authSource', 'admin'); } @@ -314,7 +346,7 @@ class TestConfiguration { } kmsProviders(type, localKey) { - const kmsProviders = {}; + const kmsProviders: Record = {}; if (typeof type !== 'string' || type === 'aws') { kmsProviders.aws = { accessKeyId: 
this.clientSideEncryption.AWS_ACCESS_KEY_ID, @@ -329,5 +361,3 @@ class TestConfiguration { return kmsProviders; } } - -module.exports = { TestConfiguration }; diff --git a/test/tools/runner/filters/mongodb_topology_filter.js b/test/tools/runner/filters/mongodb_topology_filter.js index e26e137eff..271b707cb0 100755 --- a/test/tools/runner/filters/mongodb_topology_filter.js +++ b/test/tools/runner/filters/mongodb_topology_filter.js @@ -16,7 +16,6 @@ class MongoDBTopologyFilter { let type = client.topology.description.type; context.topologyType = type; this.runtimeTopology = topologyTypeToString(type); - console.error(`[ topology type: ${this.runtimeTopology} ]`); callback(); } diff --git a/test/tools/runner/filters/mongodb_version_filter.js b/test/tools/runner/filters/mongodb_version_filter.js index aed6994e83..eff44d56a9 100755 --- a/test/tools/runner/filters/mongodb_version_filter.js +++ b/test/tools/runner/filters/mongodb_version_filter.js @@ -1,7 +1,6 @@ 'use strict'; const semver = require('semver'); -const util = require('util'); /** * Filter for the MongoDB version required for the test @@ -30,11 +29,8 @@ class MongoDBVersionFilter { callback(err); return; } - context.version = this.version = result.versionArray.slice(0, 3).join('.'); - console.error('running against mongodb version:'); - console.error(util.inspect(result, { colors: true })); - + context.buildInfo = result; callback(); }); } diff --git a/test/tools/runner/plugins/client_leak_checker.js b/test/tools/runner/hooks/client_leak_checker.js similarity index 91% rename from test/tools/runner/plugins/client_leak_checker.js rename to test/tools/runner/hooks/client_leak_checker.js index 6e485ae158..9053dea213 100644 --- a/test/tools/runner/plugins/client_leak_checker.js +++ b/test/tools/runner/hooks/client_leak_checker.js @@ -29,7 +29,7 @@ function unifiedTopologyIsConnected(client) { ); } -after(function () { +const afterClientLeakHook = function () { const traces = []; const openClientCount = activeClients.reduce((count, client) => { if (unifiedTopologyIsConnected(client)) { @@ -46,4 +46,10 @@ after(function () { } activeClients = []; -}); +}; + +module.exports = { + mochaHooks: { + afterAll: [afterClientLeakHook] + } +}; diff --git a/test/tools/runner/index.js b/test/tools/runner/hooks/configuration.js similarity index 55% rename from test/tools/runner/index.js rename to test/tools/runner/hooks/configuration.js index b332c2bc2a..14c4a92321 100644 --- a/test/tools/runner/index.js +++ b/test/tools/runner/hooks/configuration.js @@ -6,10 +6,10 @@ require('source-map-support').install({ const path = require('path'); const fs = require('fs'); -const { MongoClient } = require('../../../src'); -const { TestConfiguration } = require('./config'); -const { getEnvironmentalOptions } = require('../utils'); -const mock = require('../mongodb-mock/index'); +const { MongoClient } = require('../../../../src'); +const { TestConfiguration } = require('../config'); +const { getEnvironmentalOptions } = require('../../utils'); +const mock = require('../../mongodb-mock/index'); const { inspect } = require('util'); const MONGODB_URI = process.env.MONGODB_URI || 'mongodb://localhost:27017'; @@ -30,11 +30,11 @@ async function initializeFilters(client) { const context = {}; const filterFiles = fs - .readdirSync(path.join(__dirname, 'filters')) + .readdirSync(path.join(__dirname, '../filters')) .filter(x => x.indexOf('js') !== -1); for (const filterName of filterFiles) { - const FilterModule = require(path.join(__dirname, 'filters', filterName)); + const 
FilterModule = require(path.join(__dirname, '../filters', filterName)); const filter = new FilterModule(); console.assert(typeof filter === 'object'); @@ -52,7 +52,7 @@ async function initializeFilters(client) { return context; } -beforeEach(async function () { +const testSkipBeforeEachHook = async function () { // `metadata` always exists, `requires` is optional const requires = this.currentTest.metadata.requires; if (requires && Object.keys(requires).length > 0) { @@ -72,13 +72,12 @@ beforeEach(async function () { this.skip(); } } -}); +}; -before(async function () { - const client = new MongoClient( - loadBalanced ? SINGLE_MONGOS_LB_URI : MONGODB_URI, - getEnvironmentalOptions() - ); +const testConfigBeforeHook = async function () { + const client = new MongoClient(loadBalanced ? SINGLE_MONGOS_LB_URI : MONGODB_URI, { + ...getEnvironmentalOptions() + }); await client.connect(); @@ -98,15 +97,46 @@ before(async function () { context ); await client.close(); -}); -// ensure all mock connections are closed after the suite is run -after(() => mock.cleanup()); - -// optionally enable test runner-wide plugins -require('./plugins/deferred'); -require('./plugins/session_leak_checker'); -require('./plugins/client_leak_checker'); + const currentEnv = { + // TODO(NODE-3714): Improve environment detection + topology: this.configuration.topologyType, + version: this.configuration.buildInfo.version, + node: process.version, + os: process.platform, + serverless: process.env.SERVERLESS === '1', + auth: process.env.AUTH === 'auth', + tls: process.env.SSL === 'ssl', + csfle: this.configuration.clientSideEncryption.enabled, + serverApi: MONGODB_API_VERSION, + atlas: process.env.ATLAS_CONNECTIVITY != null, + aws: MONGODB_URI.includes('authMechanism=MONGODB-AWS'), + adl: this.configuration.buildInfo.dataLake + ? 
this.configuration.buildInfo.dataLake.version + : false, + kerberos: process.env.KRB5_PRINCIPAL != null, + ldap: MONGODB_URI.includes('authMechanism=PLAIN'), + ocsp: process.env.OCSP_TLS_SHOULD_SUCCEED != null && process.env.CA_FILE != null, + socks5: MONGODB_URI.includes('proxyHost=') + }; + + console.error(inspect(currentEnv, { colors: true })); +}; -// configure mocha -require('mocha-sinon'); +// ensure all mock connections are closed after the suite is run +const cleanUpMocksAfterHook = () => mock.cleanup(); + +const beforeAllPluginImports = () => { + // optionally enable test runner-wide plugins + require('../plugins/deferred'); + // configure mocha + require('mocha-sinon'); +}; + +module.exports = { + mochaHooks: { + beforeAll: [beforeAllPluginImports, testConfigBeforeHook], + beforeEach: [testSkipBeforeEachHook], + afterAll: [cleanUpMocksAfterHook] + } +}; diff --git a/test/tools/runner/plugins/session_leak_checker.js b/test/tools/runner/hooks/session_leak_checker.js similarity index 88% rename from test/tools/runner/plugins/session_leak_checker.js rename to test/tools/runner/hooks/session_leak_checker.js index b702f0f17f..64b6c53951 100644 --- a/test/tools/runner/plugins/session_leak_checker.js +++ b/test/tools/runner/hooks/session_leak_checker.js @@ -1,6 +1,6 @@ 'use strict'; -const expect = require('chai').expect; +const { expect } = require('chai'); const sinon = require('sinon'); const { Topology } = require('../../../../src/sdam/topology'); const { MongoClient } = require('../../../../src/mongo_client'); @@ -23,14 +23,14 @@ function dumpSessionInfo(which, sessions) { }); } -beforeEach('Session Leak Before Each - Set up clean test environment', () => { +const sessionLeakCheckBeforeEachEnvReset = () => { sandbox.restore(); activeSessions = new Set(); pooledSessions = new Set(); activeSessionsBeforeClose = new Set(); -}); +}; -beforeEach('Session Leak Before Each - setup session tracking', function () { +const sessionLeakCheckBeforeEach = function () { if (!this.currentTest || getSessionLeakMetadata(this.currentTest).skipLeakTests) { return; } @@ -92,9 +92,9 @@ beforeEach('Session Leak Before Each - setup session tracking', function () { activeSessionsBeforeClose = new Set(activeSessions); return _close.apply(this, arguments); }); -}); +}; -afterEach('Session Leak After Each - ensure no leaks', function () { +const sessionLeakCheckAfterEach = function () { if ( this.currentTest == null || this.currentTest.state === 'failed' || @@ -103,6 +103,8 @@ afterEach('Session Leak After Each - ensure no leaks', function () { return; } + const testName = this.currentTest.fullTitle(); + try { if (activeSessionsBeforeClose.size) { dumpSessionInfo('active sessions before `close`', activeSessionsBeforeClose); @@ -123,12 +125,12 @@ afterEach('Session Leak After Each - ensure no leaks', function () { ).to.equal(0); if (pooledSessions.size) { - dumpSessionInfo('pooled sessions', pooledSessions); + dumpSessionInfo(`pooled sessions in "${testName}"`, pooledSessions); } expect( pooledSessions.size, - `client close failed to clean up ${pooledSessions.size} pooled sessions` + `client close failed to clean up ${pooledSessions.size} pooled sessions, did you use an after hook to clean up a client?` ).to.equal(0); } catch (e) { if (activeSessions) activeSessions.clear(); @@ -136,4 +138,11 @@ afterEach('Session Leak After Each - ensure no leaks', function () { if (activeSessionsBeforeClose) activeSessionsBeforeClose.clear(); this.test.error(e); } -}); +}; + +module.exports = { + mochaHooks: { + beforeEach: 
[sessionLeakCheckBeforeEachEnvReset, sessionLeakCheckBeforeEach], + afterEach: [sessionLeakCheckAfterEach] + } +}; diff --git a/test/tools/spec-runner/context.js b/test/tools/spec-runner/context.js index 6b6e76c95c..9c098f9722 100644 --- a/test/tools/spec-runner/context.js +++ b/test/tools/spec-runner/context.js @@ -63,8 +63,9 @@ class TestRunnerContext { this.threads = new Map(); } - runForAllClients(fn) { - const allClients = [this.sharedClient].concat(this.failPointClients); + async runForAllClients(fn) { + const allClients = [...this.failPointClients]; + if (this.sharedClient) allClients.unshift(this.sharedClient); return Promise.all(allClients.map(fn)); } @@ -87,11 +88,8 @@ class TestRunnerContext { return this.runForAllClients(client => client.connect()); } - teardown() { - return Promise.all([ - this.runForAllClients(client => client.close()), - this.sharedClient.close() - ]); + async teardown() { + await this.runForAllClients(client => client.close()); } cleanupAfterSuite() { diff --git a/test/tools/spec-runner/index.js b/test/tools/spec-runner/index.js index c2d8310165..8067eaf4fd 100644 --- a/test/tools/spec-runner/index.js +++ b/test/tools/spec-runner/index.js @@ -8,8 +8,8 @@ const { EJSON } = require('bson'); const { isRecord } = require('../../../src/utils'); const TestRunnerContext = require('./context').TestRunnerContext; const resolveConnectionString = require('./utils').resolveConnectionString; -const { shouldRunServerlessTest } = require('../../tools/utils'); const { LEGACY_HELLO_COMMAND } = require('../../../src/constants'); +const { topologySatisfies } = require('../unified-spec-runner/unified-utils'); // Promise.try alternative https://stackoverflow.com/questions/60624081/promise-try-without-bluebird/60624164?noredirect=1#comment107255389_60624164 function promiseTry(callback) { @@ -92,109 +92,103 @@ function gatherTestSuites(specPath) { ); } -function parseTopologies(topologies) { - if (topologies == null) { - return ['replicaset', 'sharded', 'single']; +/** + * Transforms the legacy specification into the unified format specification + * **NOTE:** Called directly as a .map() callback + * @param {Record} runOn - a legacy runOn specification + * @returns {import('../unified-spec-runner/schema').RunOnRequirement} + */ +function legacyRunOnToRunOnRequirement(runOn) { + const runOnRequirement = { ...runOn }; + + if (typeof runOn.topology !== 'undefined') { + runOnRequirement.topologies = runOn.topology; + } + + if (typeof runOn.authEnabled !== 'undefined') { + runOnRequirement.auth = runOn.authEnabled; } - return topologies; + return runOnRequirement; } -function parseRunOn(runOn) { - return runOn.map(config => { - const topology = parseTopologies(config.topology); - const version = []; - if (config.minServerVersion) { - version.push(`>= ${config.minServerVersion}`); +function generateTopologyTests(testSuites, testContext, filter) { + for (const testSuite of testSuites) { + let runOn = testSuite.runOn; + if (!testSuite.runOn && !Array.isArray(runOn)) { + throw new Error('no runOn requirement? it should be required'); } - if (config.maxServerVersion) { - version.push(`<= ${config.maxServerVersion}`); - } + const beforeEachFilter = async function () { + let utilClient; + if (this.configuration.isLoadBalanced) { + // The util client can always point at the single mongos LB frontend. 
+        utilClient = this.configuration.newClient(this.configuration.singleMongosLoadBalancerUri);
+      } else {
+        utilClient = this.configuration.newClient();
+      }
 
-    const mongodb = version.join(' ');
-    return {
-      topology,
-      mongodb,
-      authEnabled: !!config.authEnabled,
-      serverless: config.serverless
-    };
-  });
-}
+      await utilClient.connect();
 
-function generateTopologyTests(testSuites, testContext, filter) {
-  testSuites.forEach(testSuite => {
-    // TODO: remove this when SPEC-1255 is completed
-    let runOn = testSuite.runOn;
-    if (!testSuite.runOn) {
-      runOn = [{ minServerVersion: testSuite.minServerVersion }];
-      if (testSuite.maxServerVersion) {
-        runOn.push({ maxServerVersion: testSuite.maxServerVersion });
+      const allRequirements = runOn.map(legacyRunOnToRunOnRequirement);
+
+      let shouldRun = true;
+      for (const requirement of allRequirements) {
+        shouldRun =
+          shouldRun && (await topologySatisfies(this.currentTest.ctx, requirement, utilClient));
       }
-    }
-    const environmentRequirementList = parseRunOn(runOn);
-
-    environmentRequirementList.forEach(requires => {
-      const suiteName = `${testSuite.name} - ${requires.topology.join()}`;
-      describe(suiteName, {
-        metadata: { requires },
-        test: function () {
-          beforeEach(() => prepareDatabaseForSuite(testSuite, testContext));
-          afterEach(() => testContext.cleanupAfterSuite());
-          testSuite.tests.forEach(spec => {
-            const maybeIt = shouldRunSpecTest(this.configuration, requires, spec, filter)
-              ? it
-              : it.skip;
-            maybeIt(spec.description, function () {
-              let testPromise = Promise.resolve();
-              if (spec.failPoint) {
-                testPromise = testPromise.then(() => testContext.enableFailPoint(spec.failPoint));
-              }
-
-              // run the actual test
-              testPromise = testPromise.then(() =>
-                runTestSuiteTest(this.configuration, spec, testContext)
-              );
-
-              if (spec.failPoint) {
-                testPromise = testPromise.then(() => testContext.disableFailPoint(spec.failPoint));
-              }
-              return testPromise.then(() => validateOutcome(spec, testContext));
-            });
-          });
-        }
-      });
-    });
-  });
-}
-function shouldRunSpecTest(configuration, requires, spec, filter) {
-  if (requires.authEnabled && process.env.AUTH !== 'auth') {
-    // TODO(NODE-3488): We do not have a way to determine if auth is enabled in our mocha metadata
-    // We need to do a admin.command({getCmdLineOpts: 1}) if it errors (code=13) auth is on
-    return false;
-  }
+      const spec = this.currentTest.spec;
+
+      if (
+        shouldRun &&
+        spec.operations.some(
+          op => op.name === 'waitForEvent' && op.arguments.event === 'PoolReadyEvent'
+        )
+      ) {
+        this.currentTest.skipReason =
+          'TODO(NODE-2994): Connection storms work will add new events to connection pool';
+        shouldRun = false;
+      }
 
-  if (
-    requires.serverless &&
-    !shouldRunServerlessTest(requires.serverless, !!process.env.SERVERLESS)
-  ) {
-    return false;
-  }
+      if (shouldRun && spec.skipReason) {
+        this.currentTest.skipReason = spec.skipReason;
+        shouldRun = false;
+      }
 
-  if (
-    spec.operations.some(
-      op => op.name === 'waitForEvent' && op.arguments.event === 'PoolReadyEvent'
-    )
-  ) {
-    // TODO(NODE-2994): Connection storms work will add new events to connection pool
-    return false;
-  }
+      if (typeof filter === 'function' && !filter(spec, this.configuration)) {
+        this.currentTest.skipReason = `filtered by custom filter passed to generateTopologyTests`;
+        shouldRun = false;
+      }
+      await utilClient.close();
 
-  if (spec.skipReason || (filter && typeof filter === 'function' && !filter(spec, configuration))) {
-    return false;
+      if (!shouldRun) this.skip();
+    };
+
+    describe(testSuite.name, function () {
+      beforeEach(beforeEachFilter);
+      beforeEach(() => prepareDatabaseForSuite(testSuite, testContext));
+      afterEach(() => testContext.cleanupAfterSuite());
+      for (const spec of testSuite.tests) {
+        const mochaTest = it(spec.description, async function () {
+          if (spec.failPoint) {
+            await testContext.enableFailPoint(spec.failPoint);
+          }
+
+          // run the actual test
+          await runTestSuiteTest(this.configuration, spec, testContext);
+
+          if (spec.failPoint) {
+            await testContext.disableFailPoint(spec.failPoint);
+          }
+
+          await validateOutcome(spec, testContext);
+        });
+        // Make the spec test available to the beforeEach filter
+        mochaTest.spec = spec;
+      }
+    });
   }
-  return true;
 }
 
 // Test runner helpers
@@ -880,5 +874,5 @@ module.exports = {
   TestRunnerContext,
   gatherTestSuites,
   generateTopologyTests,
-  parseRunOn
+  legacyRunOnToRunOnRequirement
 };
diff --git a/test/tools/unified-spec-runner/entities.ts b/test/tools/unified-spec-runner/entities.ts
index c4855dab24..586e905bd0 100644
--- a/test/tools/unified-spec-runner/entities.ts
+++ b/test/tools/unified-spec-runner/entities.ts
@@ -32,7 +32,8 @@ import { ReadPreference } from '../../../src/read_preference';
 import { ClientSession } from '../../../src/sessions';
 import { WriteConcern } from '../../../src/write_concern';
 import { ejson, getEnvironmentalOptions } from '../../tools/utils';
-import { TestConfiguration, trace } from './runner';
+import type { TestConfiguration } from '../runner/config';
+import { trace } from './runner';
 import type { ClientEntity, EntityDescription } from './schema';
 import { makeConnectionString, patchCollectionOptions, patchDbOptions } from './unified-utils';
 
diff --git a/test/tools/unified-spec-runner/runner.ts b/test/tools/unified-spec-runner/runner.ts
index eff46d7711..932af6dddf 100644
--- a/test/tools/unified-spec-runner/runner.ts
+++ b/test/tools/unified-spec-runner/runner.ts
@@ -12,10 +12,6 @@ import { executeOperationAndCheck } from './operations';
 import * as uni from './schema';
 import { patchVersion, topologySatisfies, zip } from './unified-utils';
 
-export type TestConfiguration = InstanceType<
-  typeof import('../../tools/runner/config')['TestConfiguration']
->;
-
 export function trace(message: string): void {
   if (process.env.UTR_TRACE) {
     console.error(` > ${message}`);
@@ -57,7 +53,7 @@ export async function runUnifiedTest(
     // If test.skipReason is specified, the test runner MUST skip this
     // test and MAY use the string value to log a message.
     if (test.skipReason) {
-      console.warn(`Skipping test ${test.description}: ${test.skipReason}.`);
+      ctx.skipReason = test.skipReason;
       ctx.skip();
     }
 
diff --git a/test/tools/unified-spec-runner/schema.ts b/test/tools/unified-spec-runner/schema.ts
index 2bad64a6b9..1c6847c59f 100644
--- a/test/tools/unified-spec-runner/schema.ts
+++ b/test/tools/unified-spec-runner/schema.ts
@@ -19,7 +19,7 @@ export interface OperationDescription {
 export interface UnifiedSuite {
   description: string;
   schemaVersion: string;
-  runOnRequirements?: [RunOnRequirement, ...RunOnRequirement[]];
+  runOnRequirements?: RunOnRequirement[];
   createEntities?: [EntityDescription, ...EntityDescription[]];
   initialData?: [CollectionData, ...CollectionData[]];
   tests: [Test, ...Test[]];
@@ -34,8 +34,8 @@ export const TopologyType = Object.freeze({
 } as const);
 export type TopologyId = typeof TopologyType[keyof typeof TopologyType];
 export interface RunOnRequirement {
-  serverless: 'forbid' | 'allow' | 'require';
-  auth: boolean;
+  serverless?: 'forbid' | 'allow' | 'require';
+  auth?: boolean;
   maxServerVersion?: string;
   minServerVersion?: string;
   topologies?: TopologyId[];
diff --git a/test/tools/unified-spec-runner/unified-utils.ts b/test/tools/unified-spec-runner/unified-utils.ts
index fa418bb80b..166bbfcbad 100644
--- a/test/tools/unified-spec-runner/unified-utils.ts
+++ b/test/tools/unified-spec-runner/unified-utils.ts
@@ -44,6 +44,10 @@ export async function topologySatisfies(
     LoadBalanced: 'load-balanced'
   }[config.topologyType];
 
+  if (!Array.isArray(r.topologies)) {
+    throw new Error('Topology specification must be an array');
+  }
+
   if (r.topologies.includes('sharded-replicaset') && topologyType === 'sharded') {
     const shards = await utilClient.db('config').collection('shards').find({}).toArray();
     ok &&= shards.length > 0 && shards.every(shard => shard.host.split(',').length > 1);
@@ -93,7 +97,15 @@ export async function topologySatisfies(
     if (!ok && skipReason == null) skipReason = `has serverless set to ${r.serverless}`;
   }
 
-  if (!ok && skipReason != null && ctx.test) ctx.test.skipReason = skipReason;
+  if (!ok && skipReason != null) {
+    if (ctx.currentTest) {
+      // called from beforeEach hook
+      ctx.currentTest.skipReason = skipReason;
+    } else if (ctx.test) {
+      // called from within a test
+      ctx.test.skipReason = skipReason;
+    }
+  }
 
   return ok;
 }
diff --git a/test/unit/assorted/server_discovery_and_monitoring.spec.test.js b/test/unit/assorted/server_discovery_and_monitoring.spec.test.js
index 1a949f3eea..b6433c4f7a 100644
--- a/test/unit/assorted/server_discovery_and_monitoring.spec.test.js
+++ b/test/unit/assorted/server_discovery_and_monitoring.spec.test.js
@@ -69,11 +69,8 @@ describe('Server Discovery and Monitoring (spec)', function () {
     specTests[specTestName].forEach(testData => {
       const skip = shouldSkip(testData.description);
       const type = skip ? it.skip : it;
-      type(testData.description, {
-        metadata: { requires: { topology: 'single' } },
-        test: function (done) {
-          executeSDAMTest(testData, done);
-        }
+      type(testData.description, function (done) {
+        executeSDAMTest(testData, done);
       });
     });
   });
diff --git a/test/unit/assorted/snappy.test.js b/test/unit/assorted/snappy.test.js
index 6072550d5a..6bb77f04de 100644
--- a/test/unit/assorted/snappy.test.js
+++ b/test/unit/assorted/snappy.test.js
@@ -26,7 +26,7 @@ describe('Compression', function () {
       client = new MongoClient(`mongodb://${server.uri()}`, { compressors: 'snappy' });
     });
 
-    after(async function () {
+    afterEach(async function () {
       if (server) await mock.cleanup();
       if (client) await client.close();
     });