mongodb

Dependencies: 208, Maintainers: 8, Versions: 511
Comparing version 6.6.2 to 6.7.0-dev.20240530.sha.f56938f

lib/cmap/auth/mongodb_oidc/automated_callback_workflow.js


lib/client-side-encryption/providers/azure.js
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.loadAzureCredentials = exports.fetchAzureKMSToken = exports.prepareRequest = exports.tokenCache = exports.AzureCredentialCache = void 0;
exports.loadAzureCredentials = exports.fetchAzureKMSToken = exports.prepareRequest = exports.addAzureParams = exports.tokenCache = exports.AzureCredentialCache = exports.AZURE_BASE_URL = void 0;
const error_1 = require("../../error");
const utils_1 = require("../../utils");
const errors_1 = require("../errors");
const utils_1 = require("./utils");
const MINIMUM_TOKEN_REFRESH_IN_MILLISECONDS = 6000;
/** Base URL for getting Azure tokens. */
exports.AZURE_BASE_URL = 'http://169.254.169.254/metadata/identity/oauth2/token?';
/**

@@ -71,2 +74,15 @@ * @internal

* @internal
* Get the Azure endpoint URL.
*/
function addAzureParams(url, resource, username) {
url.searchParams.append('api-version', '2018-02-01');
url.searchParams.append('resource', resource);
if (username) {
url.searchParams.append('client_id', username);
}
return url;
}
exports.addAzureParams = addAzureParams;
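// Illustrative note (not part of the module): with the helper above,
//   addAzureParams(new URL(exports.AZURE_BASE_URL), 'https://vault.azure.net')
// yields an IMDS request URL of the form
//   http://169.254.169.254/metadata/identity/oauth2/token?api-version=2018-02-01&resource=https%3A%2F%2Fvault.azure.net
// and also appends client_id when a username (managed-identity client id) is supplied.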
/**
* @internal
*

@@ -77,5 +93,4 @@ * parses any options provided by prose tests to `fetchAzureKMSToken` and merges them with

function prepareRequest(options) {
const url = new URL(options.url?.toString() ?? 'http://169.254.169.254/metadata/identity/oauth2/token');
url.searchParams.append('api-version', '2018-02-01');
url.searchParams.append('resource', 'https://vault.azure.net');
const url = new URL(options.url?.toString() ?? exports.AZURE_BASE_URL);
addAzureParams(url, 'https://vault.azure.net');
const headers = { ...options.headers, 'Content-Type': 'application/json', Metadata: true };

@@ -102,3 +117,3 @@ return { headers, url };

catch (error) {
if (error instanceof errors_1.MongoCryptKMSRequestNetworkTimeoutError) {
if (error instanceof error_1.MongoNetworkTimeoutError) {
throw new errors_1.MongoCryptAzureKMSRequestError(`[Azure KMS] ${error.message}`);

@@ -105,0 +120,0 @@ }

@@ -25,3 +25,7 @@ "use strict";

}
const ALLOWED_PROVIDER_NAMES = ['aws', 'azure'];
const ALLOWED_ENVIRONMENT_NAMES = [
'test',
'azure',
'gcp'
];
const ALLOWED_HOSTS_ERROR = 'Auth mechanism property ALLOWED_HOSTS must be an array of strings.';

@@ -31,2 +35,3 @@ /** @internal */

'*.mongodb.net',
'*.mongodb-qa.net',
'*.mongodb-dev.net',

@@ -39,3 +44,3 @@ '*.mongodbgov.net',

/** Error for when the token audience is missing in the environment. */
const TOKEN_AUDIENCE_MISSING_ERROR = 'TOKEN_AUDIENCE must be set in the auth mechanism properties when PROVIDER_NAME is azure.';
const TOKEN_RESOURCE_MISSING_ERROR = 'TOKEN_RESOURCE must be set in the auth mechanism properties when ENVIRONMENT is azure or gcp.';
/**

@@ -115,20 +120,23 @@ * A representation of the credentials used by MongoDB

if (this.mechanism === providers_1.AuthMechanism.MONGODB_OIDC) {
if (this.username && this.mechanismProperties.PROVIDER_NAME) {
throw new error_1.MongoInvalidArgumentError(`username and PROVIDER_NAME may not be used together for mechanism '${this.mechanism}'.`);
if (this.username &&
this.mechanismProperties.ENVIRONMENT &&
this.mechanismProperties.ENVIRONMENT !== 'azure') {
throw new error_1.MongoInvalidArgumentError(`username and ENVIRONMENT '${this.mechanismProperties.ENVIRONMENT}' may not be used together for mechanism '${this.mechanism}'.`);
}
if (this.mechanismProperties.PROVIDER_NAME === 'azure' &&
!this.mechanismProperties.TOKEN_AUDIENCE) {
throw new error_1.MongoAzureError(TOKEN_AUDIENCE_MISSING_ERROR);
if (this.username && this.password) {
throw new error_1.MongoInvalidArgumentError(`No password is allowed in ENVIRONMENT '${this.mechanismProperties.ENVIRONMENT}' for '${this.mechanism}'.`);
}
if (this.mechanismProperties.PROVIDER_NAME &&
!ALLOWED_PROVIDER_NAMES.includes(this.mechanismProperties.PROVIDER_NAME)) {
throw new error_1.MongoInvalidArgumentError(`Currently only a PROVIDER_NAME in ${ALLOWED_PROVIDER_NAMES.join(',')} is supported for mechanism '${this.mechanism}'.`);
if ((this.mechanismProperties.ENVIRONMENT === 'azure' ||
this.mechanismProperties.ENVIRONMENT === 'gcp') &&
!this.mechanismProperties.TOKEN_RESOURCE) {
throw new error_1.MongoInvalidArgumentError(TOKEN_RESOURCE_MISSING_ERROR);
}
if (this.mechanismProperties.REFRESH_TOKEN_CALLBACK &&
!this.mechanismProperties.REQUEST_TOKEN_CALLBACK) {
throw new error_1.MongoInvalidArgumentError(`A REQUEST_TOKEN_CALLBACK must be provided when using a REFRESH_TOKEN_CALLBACK for mechanism '${this.mechanism}'`);
if (this.mechanismProperties.ENVIRONMENT &&
!ALLOWED_ENVIRONMENT_NAMES.includes(this.mechanismProperties.ENVIRONMENT)) {
throw new error_1.MongoInvalidArgumentError(`Currently only a ENVIRONMENT in ${ALLOWED_ENVIRONMENT_NAMES.join(',')} is supported for mechanism '${this.mechanism}'.`);
}
if (!this.mechanismProperties.PROVIDER_NAME &&
!this.mechanismProperties.REQUEST_TOKEN_CALLBACK) {
throw new error_1.MongoInvalidArgumentError(`Either a PROVIDER_NAME or a REQUEST_TOKEN_CALLBACK must be specified for mechanism '${this.mechanism}'.`);
if (!this.mechanismProperties.ENVIRONMENT &&
!this.mechanismProperties.OIDC_CALLBACK &&
!this.mechanismProperties.OIDC_HUMAN_CALLBACK) {
throw new error_1.MongoInvalidArgumentError(`Either a ENVIRONMENT, OIDC_CALLBACK, or OIDC_HUMAN_CALLBACK must be specified for mechanism '${this.mechanism}'.`);
}

@@ -135,0 +143,0 @@ if (this.mechanismProperties.ALLOWED_HOSTS) {
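For reference, a minimal sketch of credentials that satisfy the new validation above: the azure machine environment with a TOKEN_RESOURCE, plus an optional username, which azure (and only azure) may combine with OIDC as the managed-identity client id. The URI, resource, and client id are placeholders, not values from this diff.

import { MongoClient } from 'mongodb';

const client = new MongoClient('mongodb+srv://cluster0.example.mongodb.net', {
  auth: { username: '<managed-identity-client-id>' }, // allowed only with ENVIRONMENT 'azure'
  authMechanism: 'MONGODB-OIDC',
  authMechanismProperties: {
    ENVIRONMENT: 'azure',
    TOKEN_RESOURCE: '<token-audience>' // required when ENVIRONMENT is 'azure' or 'gcp'
  }
});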

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.MongoDBOIDC = exports.OIDC_WORKFLOWS = void 0;
exports.MongoDBOIDC = exports.OIDC_WORKFLOWS = exports.OIDC_VERSION = void 0;
const error_1 = require("../../error");
const auth_provider_1 = require("./auth_provider");
const aws_service_workflow_1 = require("./mongodb_oidc/aws_service_workflow");
const azure_service_workflow_1 = require("./mongodb_oidc/azure_service_workflow");
const callback_workflow_1 = require("./mongodb_oidc/callback_workflow");
const azure_machine_workflow_1 = require("./mongodb_oidc/azure_machine_workflow");
const gcp_machine_workflow_1 = require("./mongodb_oidc/gcp_machine_workflow");
const token_cache_1 = require("./mongodb_oidc/token_cache");
const token_machine_workflow_1 = require("./mongodb_oidc/token_machine_workflow");
/** Error when credentials are missing. */
const MISSING_CREDENTIALS_ERROR = 'AuthContext must provide credentials.';
/** The current version of OIDC implementation. */
exports.OIDC_VERSION = 1;
/** @internal */
exports.OIDC_WORKFLOWS = new Map();
exports.OIDC_WORKFLOWS.set('callback', new callback_workflow_1.CallbackWorkflow());
exports.OIDC_WORKFLOWS.set('aws', new aws_service_workflow_1.AwsServiceWorkflow());
exports.OIDC_WORKFLOWS.set('azure', new azure_service_workflow_1.AzureServiceWorkflow());
exports.OIDC_WORKFLOWS.set('test', () => new token_machine_workflow_1.TokenMachineWorkflow(new token_cache_1.TokenCache()));
exports.OIDC_WORKFLOWS.set('azure', () => new azure_machine_workflow_1.AzureMachineWorkflow(new token_cache_1.TokenCache()));
exports.OIDC_WORKFLOWS.set('gcp', () => new gcp_machine_workflow_1.GCPMachineWorkflow(new token_cache_1.TokenCache()));
/**
* OIDC auth provider.
* @experimental
*/

@@ -24,4 +26,8 @@ class MongoDBOIDC extends auth_provider_1.AuthProvider {

*/
constructor() {
constructor(workflow) {
super();
if (!workflow) {
throw new error_1.MongoInvalidArgumentError('No workflow provided to the OIDC auth provider.');
}
this.workflow = workflow;
}

@@ -33,5 +39,12 @@ /**

const { connection, reauthenticating, response } = authContext;
if (response?.speculativeAuthenticate?.done) {
return;
}
const credentials = getCredentials(authContext);
const workflow = getWorkflow(credentials);
await workflow.execute(connection, credentials, reauthenticating, response);
if (reauthenticating) {
await this.workflow.reauthenticate(connection, credentials);
}
else {
await this.workflow.execute(connection, credentials, response);
}
}

@@ -42,5 +55,5 @@ /**

async prepare(handshakeDoc, authContext) {
const { connection } = authContext;
const credentials = getCredentials(authContext);
const workflow = getWorkflow(credentials);
const result = await workflow.speculativeAuth(credentials);
const result = await this.workflow.speculativeAuth(connection, credentials);
return { ...handshakeDoc, ...result };

@@ -60,13 +73,2 @@ }

}
/**
* Gets either a device workflow or callback workflow.
*/
function getWorkflow(credentials) {
const providerName = credentials.mechanismProperties.PROVIDER_NAME;
const workflow = exports.OIDC_WORKFLOWS.get(providerName || 'callback');
if (!workflow) {
throw new error_1.MongoInvalidArgumentError(`Could not load workflow for provider ${credentials.mechanismProperties.PROVIDER_NAME}`);
}
return workflow;
}
//# sourceMappingURL=mongodb_oidc.js.map
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.CallbackWorkflow = void 0;
const bson_1 = require("bson");
exports.CallbackWorkflow = exports.AUTOMATED_TIMEOUT_MS = exports.HUMAN_TIMEOUT_MS = void 0;
const promises_1 = require("timers/promises");
const error_1 = require("../../../error");
const utils_1 = require("../../../utils");
const providers_1 = require("../providers");
const callback_lock_cache_1 = require("./callback_lock_cache");
const token_entry_cache_1 = require("./token_entry_cache");
/** The current version of OIDC implementation. */
const OIDC_VERSION = 0;
/** 5 minutes in seconds */
const TIMEOUT_S = 300;
const command_builders_1 = require("./command_builders");
/** 5 minutes in milliseconds */
exports.HUMAN_TIMEOUT_MS = 300000;
/** 1 minute in milliseconds */
exports.AUTOMATED_TIMEOUT_MS = 60000;
/** Properties allowed on results of callbacks. */

@@ -18,2 +16,4 @@ const RESULT_PROPERTIES = ['accessToken', 'expiresInSeconds', 'refreshToken'];

const CALLBACK_RESULT_ERROR = 'User provided OIDC callbacks must return a valid object with an accessToken.';
/** The time to throttle callback calls. */
const THROTTLE_MS = 100;
/**

@@ -25,7 +25,8 @@ * OIDC implementation of a callback based workflow.

/**
* Instantiate the workflow
* Instantiate the callback workflow.
*/
constructor() {
this.cache = new token_entry_cache_1.TokenEntryCache();
this.callbackCache = new callback_lock_cache_1.CallbackLockCache();
constructor(cache, callback) {
this.cache = cache;
this.callback = this.withLock(callback);
this.lastExecutionTime = Date.now() - THROTTLE_MS;
}

@@ -36,56 +37,37 @@ /**

*/
async speculativeAuth(credentials) {
const document = startCommandDocument(credentials);
document.db = credentials.source;
return { speculativeAuthenticate: document };
async speculativeAuth(connection, credentials) {
// Check if the Client Cache has an access token.
// If it does, cache the access token in the Connection Cache and send a JwtStepRequest
// with the cached access token in the speculative authentication SASL payload.
if (this.cache.hasAccessToken) {
const accessToken = this.cache.getAccessToken();
connection.accessToken = accessToken;
const document = (0, command_builders_1.finishCommandDocument)(accessToken);
document.db = credentials.source;
return { speculativeAuthenticate: document };
}
return {};
}
/**
* Execute the OIDC callback workflow.
* Reauthenticate the callback workflow. For this we invalidate the access token
* in the cache and run the authentication steps again. No initial handshake needs
* to be sent.
*/
async execute(connection, credentials, reauthenticating, response) {
// Get the callbacks with locks from the callback lock cache.
const { requestCallback, refreshCallback, callbackHash } = this.callbackCache.getEntry(connection, credentials);
// Look for an existing entry in the cache.
const entry = this.cache.getEntry(connection.address, credentials.username, callbackHash);
let result;
if (entry) {
// Reauthentication cannot use a token from the cache since the server has
// stated it is invalid by the request for reauthentication.
if (entry.isValid() && !reauthenticating) {
// Presence of a valid cache entry means we can skip to the finishing step.
result = await this.finishAuthentication(connection, credentials, entry.tokenResult, response?.speculativeAuthenticate?.conversationId);
async reauthenticate(connection, credentials) {
if (this.cache.hasAccessToken) {
// Reauthentication implies the token has expired.
if (connection.accessToken === this.cache.getAccessToken()) {
// If connection's access token is the same as the cache's, remove
// the token from the cache and connection.
this.cache.removeAccessToken();
delete connection.accessToken;
}
else {
// Presence of an expired cache entry means we must fetch a new one and
// then execute the final step.
const tokenResult = await this.fetchAccessToken(connection, credentials, entry.serverInfo, reauthenticating, callbackHash, requestCallback, refreshCallback);
try {
result = await this.finishAuthentication(connection, credentials, tokenResult, reauthenticating ? undefined : response?.speculativeAuthenticate?.conversationId);
}
catch (error) {
// If we are reauthenticating and this errors with reauthentication
// required, we need to do the entire process over again and clear
// the cache entry.
if (reauthenticating &&
error instanceof error_1.MongoError &&
error.code === error_1.MONGODB_ERROR_CODES.Reauthenticate) {
this.cache.deleteEntry(connection.address, credentials.username, callbackHash);
result = await this.execute(connection, credentials, reauthenticating);
}
else {
throw error;
}
}
// If the connection's access token is different from the cache's, set
// the cache's token on the connection and do not remove from the
// cache.
connection.accessToken = this.cache.getAccessToken();
}
}
else {
// No entry in the cache requires us to do all authentication steps
// from start to finish, including getting a fresh token for the cache.
const startDocument = await this.startAuthentication(connection, credentials, reauthenticating, response);
const conversationId = startDocument.conversationId;
const serverResult = bson_1.BSON.deserialize(startDocument.payload.buffer);
const tokenResult = await this.fetchAccessToken(connection, credentials, serverResult, reauthenticating, callbackHash, requestCallback, refreshCallback);
result = await this.finishAuthentication(connection, credentials, tokenResult, conversationId);
}
return result;
await this.execute(connection, credentials);
}

@@ -97,9 +79,9 @@ /**

*/
async startAuthentication(connection, credentials, reauthenticating, response) {
async startAuthentication(connection, credentials, response) {
let result;
if (!reauthenticating && response?.speculativeAuthenticate) {
if (response?.speculativeAuthenticate) {
result = response.speculativeAuthenticate;
}
else {
result = await connection.command((0, utils_1.ns)(credentials.source), startCommandDocument(credentials), undefined);
result = await connection.command((0, utils_1.ns)(credentials.source), (0, command_builders_1.startCommandDocument)(credentials), undefined);
}

@@ -111,72 +93,44 @@ return result;

*/
async finishAuthentication(connection, credentials, tokenResult, conversationId) {
const result = await connection.command((0, utils_1.ns)(credentials.source), finishCommandDocument(tokenResult.accessToken, conversationId), undefined);
return result;
async finishAuthentication(connection, credentials, token, conversationId) {
await connection.command((0, utils_1.ns)(credentials.source), (0, command_builders_1.finishCommandDocument)(token, conversationId), undefined);
}
/**
* Fetches an access token using either the request or refresh callbacks and
* puts it in the cache.
* Executes the callback and validates the output.
*/
async fetchAccessToken(connection, credentials, serverInfo, reauthenticating, callbackHash, requestCallback, refreshCallback) {
// Get the token from the cache.
const entry = this.cache.getEntry(connection.address, credentials.username, callbackHash);
let result;
const context = { timeoutSeconds: TIMEOUT_S, version: OIDC_VERSION };
// Check if there's a token in the cache.
if (entry) {
// If the cache entry is valid, return the token result.
if (entry.isValid() && !reauthenticating) {
return entry.tokenResult;
}
// If the cache entry is not valid, remove it from the cache and first attempt
// to use the refresh callback to get a new token. If no refresh callback
// exists, then fallback to the request callback.
if (refreshCallback) {
context.refreshToken = entry.tokenResult.refreshToken;
result = await refreshCallback(serverInfo, context);
}
else {
result = await requestCallback(serverInfo, context);
}
}
else {
// With no token in the cache we use the request callback.
result = await requestCallback(serverInfo, context);
}
async executeAndValidateCallback(params) {
const result = await this.callback(params);
// Validate that the result returned by the callback is acceptable. If it is not
// we must clear the token result from the cache.
if (isCallbackResultInvalid(result)) {
this.cache.deleteEntry(connection.address, credentials.username, callbackHash);
throw new error_1.MongoMissingCredentialsError(CALLBACK_RESULT_ERROR);
}
// Cleanup the cache.
this.cache.deleteExpiredEntries();
// Put the new entry into the cache.
this.cache.addEntry(connection.address, credentials.username || '', callbackHash, result, serverInfo);
return result;
}
}
exports.CallbackWorkflow = CallbackWorkflow;
/**
* Generate the finishing command document for authentication. Will be a
* saslStart or saslContinue depending on the presence of a conversation id.
*/
function finishCommandDocument(token, conversationId) {
if (conversationId != null && typeof conversationId === 'number') {
return {
saslContinue: 1,
conversationId: conversationId,
payload: new bson_1.Binary(bson_1.BSON.serialize({ jwt: token }))
/**
* Ensure the callback is only executed one at a time and throttles the calls
* to every 100ms.
*/
withLock(callback) {
let lock = Promise.resolve();
return async (params) => {
// We do this to ensure that we would never return the result of the
// previous lock, only the current callback's value would get returned.
await lock;
lock = lock
// eslint-disable-next-line github/no-then
.catch(() => null)
// eslint-disable-next-line github/no-then
.then(async () => {
const difference = Date.now() - this.lastExecutionTime;
if (difference <= THROTTLE_MS) {
await (0, promises_1.setTimeout)(THROTTLE_MS - difference, { signal: params.timeoutContext });
}
this.lastExecutionTime = Date.now();
return await callback(params);
});
return await lock;
};
}
// saslContinue requires a conversationId in the command to be valid so in this
// case the server allows "step two" to actually be a saslStart with the token
// as the jwt since the use of the cached value has no correlating conversation
// on the particular connection.
return {
saslStart: 1,
mechanism: providers_1.AuthMechanism.MONGODB_OIDC,
payload: new bson_1.Binary(bson_1.BSON.serialize({ jwt: token }))
};
}
exports.CallbackWorkflow = CallbackWorkflow;
/**

@@ -194,17 +148,2 @@ * Determines if a result returned from a request or refresh callback

}
/**
* Generate the saslStart command document.
*/
function startCommandDocument(credentials) {
const payload = {};
if (credentials.username) {
payload.n = credentials.username;
}
return {
saslStart: 1,
autoAuthorize: 1,
mechanism: providers_1.AuthMechanism.MONGODB_OIDC,
payload: new bson_1.Binary(bson_1.BSON.serialize(payload))
};
}
//# sourceMappingURL=callback_workflow.js.map
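The withLock helper in the CallbackWorkflow above serializes user callback invocations on a shared promise chain and keeps executions at least THROTTLE_MS apart. A standalone sketch of the same pattern, with illustrative names that are not part of the driver's API:

import { setTimeout } from 'timers/promises';

function serializeAndThrottle<T, R>(fn: (arg: T) => Promise<R>, throttleMs: number) {
  let lock: Promise<unknown> = Promise.resolve();
  let lastRun = Date.now() - throttleMs;
  return async (arg: T): Promise<R> => {
    const run = lock
      // A rejection from the previous caller must not poison the chain for later callers.
      .catch(() => null)
      .then(async () => {
        const elapsed = Date.now() - lastRun;
        // Wait out the remainder of the throttle window before invoking again.
        if (elapsed < throttleMs) await setTimeout(throttleMs - elapsed);
        lastRun = Date.now();
        return await fn(arg);
      });
    lock = run;
    return await run;
  };
}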

@@ -14,3 +14,2 @@ "use strict";

MONGODB_X509: 'MONGODB-X509',
/** @experimental */
MONGODB_OIDC: 'MONGODB-OIDC'

@@ -17,0 +16,0 @@ });

@@ -55,3 +55,3 @@ "use strict";

if (!(credentials.mechanism === providers_1.AuthMechanism.MONGODB_DEFAULT) &&
!options.authProviders.getOrCreateProvider(credentials.mechanism)) {
!options.authProviders.getOrCreateProvider(credentials.mechanism, credentials.mechanismProperties)) {
throw new error_1.MongoInvalidArgumentError(`AuthMechanism '${credentials.mechanism}' not supported`);

@@ -97,3 +97,3 @@ }

const resolvedCredentials = credentials.resolveAuthMechanism(response);
const provider = options.authProviders.getOrCreateProvider(resolvedCredentials.mechanism);
const provider = options.authProviders.getOrCreateProvider(resolvedCredentials.mechanism, resolvedCredentials.mechanismProperties);
if (!provider) {

@@ -143,3 +143,3 @@ throw new error_1.MongoInvalidArgumentError(`No AuthProvider for ${resolvedCredentials.mechanism} defined.`);

handshakeDoc.saslSupportedMechs = `${credentials.source}.${credentials.username}`;
const provider = authContext.options.authProviders.getOrCreateProvider(providers_1.AuthMechanism.MONGODB_SCRAM_SHA256);
const provider = authContext.options.authProviders.getOrCreateProvider(providers_1.AuthMechanism.MONGODB_SCRAM_SHA256, credentials.mechanismProperties);
if (!provider) {

@@ -151,3 +151,3 @@ // This auth mechanism is always present.

}
const provider = authContext.options.authProviders.getOrCreateProvider(credentials.mechanism);
const provider = authContext.options.authProviders.getOrCreateProvider(credentials.mechanism, credentials.mechanismProperties);
if (!provider) {

@@ -154,0 +154,0 @@ throw new error_1.MongoInvalidArgumentError(`No AuthProvider for ${credentials.mechanism} defined.`);

@@ -312,3 +312,3 @@ "use strict";

const resolvedCredentials = credentials.resolveAuthMechanism(connection.hello);
const provider = this[kServer].topology.client.s.authProviders.getOrCreateProvider(resolvedCredentials.mechanism);
const provider = this[kServer].topology.client.s.authProviders.getOrCreateProvider(resolvedCredentials.mechanism, resolvedCredentials.mechanismProperties);
if (!provider) {

@@ -315,0 +315,0 @@ throw new error_1.MongoMissingCredentialsError(`Reauthenticate failed due to no auth provider for ${credentials.mechanism}`);

@@ -512,2 +512,5 @@ "use strict";

},
// Note that if the authMechanismProperties contain a TOKEN_RESOURCE that has a
// comma in it, it MUST be supplied as a MongoClient option instead of in the
// connection string.
authMechanismProperties: {

@@ -514,0 +517,0 @@ target: 'credentials',
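As the note above explains, commas separate the key:value pairs inside the authMechanismProperties connection-string parameter, so a TOKEN_RESOURCE that itself contains a comma can only be supplied through MongoClient options. A hedged sketch of both forms, with placeholder hosts and resources:

import { MongoClient } from 'mongodb';

// Connection-string form: fine while the resource contains no comma.
const viaUri = new MongoClient(
  'mongodb+srv://host.example.net/?authMechanism=MONGODB-OIDC' +
    '&authMechanismProperties=ENVIRONMENT:gcp,TOKEN_RESOURCE:<resource>'
);

// Options form: required when the resource contains a comma.
const viaOptions = new MongoClient('mongodb+srv://host.example.net', {
  authMechanism: 'MONGODB-OIDC',
  authMechanismProperties: {
    ENVIRONMENT: 'gcp',
    TOKEN_RESOURCE: '<resource,with,commas>'
  }
});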

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.isResumableError = exports.isNetworkTimeoutError = exports.isSDAMUnrecoverableError = exports.isNodeShuttingDownError = exports.isRetryableReadError = exports.isRetryableWriteError = exports.needsRetryableWriteLabel = exports.MongoWriteConcernError = exports.MongoServerSelectionError = exports.MongoSystemError = exports.MongoMissingDependencyError = exports.MongoMissingCredentialsError = exports.MongoCompatibilityError = exports.MongoInvalidArgumentError = exports.MongoParseError = exports.MongoNetworkTimeoutError = exports.MongoNetworkError = exports.isNetworkErrorBeforeHandshake = exports.MongoTopologyClosedError = exports.MongoCursorExhaustedError = exports.MongoServerClosedError = exports.MongoCursorInUseError = exports.MongoUnexpectedServerResponseError = exports.MongoGridFSChunkError = exports.MongoGridFSStreamError = exports.MongoTailableCursorError = exports.MongoChangeStreamError = exports.MongoAzureError = exports.MongoAWSError = exports.MongoKerberosError = exports.MongoExpiredSessionError = exports.MongoTransactionError = exports.MongoNotConnectedError = exports.MongoDecompressionError = exports.MongoBatchReExecutionError = exports.MongoRuntimeError = exports.MongoAPIError = exports.MongoDriverError = exports.MongoServerError = exports.MongoError = exports.MongoErrorLabel = exports.GET_MORE_RESUMABLE_CODES = exports.MONGODB_ERROR_CODES = exports.NODE_IS_RECOVERING_ERROR_MESSAGE = exports.LEGACY_NOT_PRIMARY_OR_SECONDARY_ERROR_MESSAGE = exports.LEGACY_NOT_WRITABLE_PRIMARY_ERROR_MESSAGE = void 0;
exports.isResumableError = exports.isNetworkTimeoutError = exports.isSDAMUnrecoverableError = exports.isNodeShuttingDownError = exports.isRetryableReadError = exports.isRetryableWriteError = exports.needsRetryableWriteLabel = exports.MongoWriteConcernError = exports.MongoServerSelectionError = exports.MongoSystemError = exports.MongoMissingDependencyError = exports.MongoMissingCredentialsError = exports.MongoCompatibilityError = exports.MongoInvalidArgumentError = exports.MongoParseError = exports.MongoNetworkTimeoutError = exports.MongoNetworkError = exports.isNetworkErrorBeforeHandshake = exports.MongoTopologyClosedError = exports.MongoCursorExhaustedError = exports.MongoServerClosedError = exports.MongoCursorInUseError = exports.MongoUnexpectedServerResponseError = exports.MongoGridFSChunkError = exports.MongoGridFSStreamError = exports.MongoTailableCursorError = exports.MongoChangeStreamError = exports.MongoGCPError = exports.MongoAzureError = exports.MongoOIDCError = exports.MongoAWSError = exports.MongoKerberosError = exports.MongoExpiredSessionError = exports.MongoTransactionError = exports.MongoNotConnectedError = exports.MongoDecompressionError = exports.MongoBatchReExecutionError = exports.MongoRuntimeError = exports.MongoAPIError = exports.MongoDriverError = exports.MongoServerError = exports.MongoError = exports.MongoErrorLabel = exports.GET_MORE_RESUMABLE_CODES = exports.MONGODB_ERROR_CODES = exports.NODE_IS_RECOVERING_ERROR_MESSAGE = exports.LEGACY_NOT_PRIMARY_OR_SECONDARY_ERROR_MESSAGE = exports.LEGACY_NOT_WRITABLE_PRIMARY_ERROR_MESSAGE = void 0;
/** @internal */

@@ -28,2 +28,3 @@ const kErrorLabels = Symbol('errorLabels');

HostNotFound: 7,
AuthenticationFailed: 18,
NetworkTimeout: 89,

@@ -465,2 +466,29 @@ ShutdownInProgress: 91,

* An error generated when the user attempts to authenticate
* via OIDC callbacks, but fails.
*
* @public
* @category Error
*/
class MongoOIDCError extends MongoRuntimeError {
/**
* **Do not use this constructor!**
*
* Meant for internal use only.
*
* @remarks
* This class is only meant to be constructed within the driver. This constructor is
* not subject to semantic versioning compatibility guarantees and may change at any time.
*
* @public
**/
constructor(message) {
super(message);
}
get name() {
return 'MongoOIDCError';
}
}
exports.MongoOIDCError = MongoOIDCError;
/**
* An error generated when the user attempts to authenticate
* via Azure, but fails.

@@ -471,3 +499,3 @@ *

*/
class MongoAzureError extends MongoRuntimeError {
class MongoAzureError extends MongoOIDCError {
/**

@@ -493,2 +521,29 @@ * **Do not use this constructor!**

/**
* An error generated when the user attempts to authenticate
* via GCP, but fails.
*
* @public
* @category Error
*/
class MongoGCPError extends MongoOIDCError {
/**
* **Do not use this constructor!**
*
* Meant for internal use only.
*
* @remarks
* This class is only meant to be constructed within the driver. This constructor is
* not subject to semantic versioning compatibility guarantees and may change at any time.
*
* @public
**/
constructor(message) {
super(message);
}
get name() {
return 'MongoGCPError';
}
}
exports.MongoGCPError = MongoGCPError;
/**
* An error generated when a ChangeStream operation fails to execute.

@@ -495,0 +550,0 @@ *

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.MongoUnexpectedServerResponseError = exports.MongoTransactionError = exports.MongoTopologyClosedError = exports.MongoTailableCursorError = exports.MongoSystemError = exports.MongoServerSelectionError = exports.MongoServerError = exports.MongoServerClosedError = exports.MongoRuntimeError = exports.MongoParseError = exports.MongoNotConnectedError = exports.MongoNetworkTimeoutError = exports.MongoNetworkError = exports.MongoMissingDependencyError = exports.MongoMissingCredentialsError = exports.MongoKerberosError = exports.MongoInvalidArgumentError = exports.MongoGridFSStreamError = exports.MongoGridFSChunkError = exports.MongoExpiredSessionError = exports.MongoError = exports.MongoDriverError = exports.MongoDecompressionError = exports.MongoCursorInUseError = exports.MongoCursorExhaustedError = exports.MongoCompatibilityError = exports.MongoChangeStreamError = exports.MongoBatchReExecutionError = exports.MongoAzureError = exports.MongoAWSError = exports.MongoAPIError = exports.ChangeStreamCursor = exports.ClientEncryption = exports.MongoBulkWriteError = exports.UUID = exports.Timestamp = exports.ObjectId = exports.MinKey = exports.MaxKey = exports.Long = exports.Int32 = exports.Double = exports.Decimal128 = exports.DBRef = exports.Code = exports.BSONType = exports.BSONSymbol = exports.BSONRegExp = exports.Binary = exports.BSON = void 0;
exports.ConnectionPoolReadyEvent = exports.ConnectionPoolMonitoringEvent = exports.ConnectionPoolCreatedEvent = exports.ConnectionPoolClosedEvent = exports.ConnectionPoolClearedEvent = exports.ConnectionCreatedEvent = exports.ConnectionClosedEvent = exports.ConnectionCheckOutStartedEvent = exports.ConnectionCheckOutFailedEvent = exports.ConnectionCheckedOutEvent = exports.ConnectionCheckedInEvent = exports.CommandSucceededEvent = exports.CommandStartedEvent = exports.CommandFailedEvent = exports.WriteConcern = exports.ReadPreference = exports.ReadConcern = exports.TopologyType = exports.ServerType = exports.ReadPreferenceMode = exports.ReadConcernLevel = exports.ProfilingLevel = exports.ReturnDocument = exports.ServerApiVersion = exports.ExplainVerbosity = exports.MongoErrorLabel = exports.CURSOR_FLAGS = exports.Compressor = exports.AuthMechanism = exports.GSSAPICanonicalizationValue = exports.AutoEncryptionLoggerLevel = exports.BatchType = exports.UnorderedBulkOperation = exports.OrderedBulkOperation = exports.MongoClient = exports.ListIndexesCursor = exports.ListCollectionsCursor = exports.GridFSBucketWriteStream = exports.GridFSBucketReadStream = exports.GridFSBucket = exports.FindCursor = exports.Db = exports.Collection = exports.ClientSession = exports.ChangeStream = exports.CancellationToken = exports.AggregationCursor = exports.Admin = exports.AbstractCursor = exports.MongoWriteConcernError = void 0;
exports.MongoClientAuthProviders = exports.MongoCryptKMSRequestNetworkTimeoutError = exports.MongoCryptInvalidArgumentError = exports.MongoCryptError = exports.MongoCryptCreateEncryptedCollectionError = exports.MongoCryptCreateDataKeyError = exports.MongoCryptAzureKMSRequestError = exports.SrvPollingEvent = exports.WaitingForSuitableServerEvent = exports.ServerSelectionSucceededEvent = exports.ServerSelectionStartedEvent = exports.ServerSelectionFailedEvent = exports.ServerSelectionEvent = exports.TopologyOpeningEvent = exports.TopologyDescriptionChangedEvent = exports.TopologyClosedEvent = exports.ServerOpeningEvent = exports.ServerHeartbeatSucceededEvent = exports.ServerHeartbeatStartedEvent = exports.ServerHeartbeatFailedEvent = exports.ServerDescriptionChangedEvent = exports.ServerClosedEvent = exports.ConnectionReadyEvent = void 0;
exports.MongoTopologyClosedError = exports.MongoTailableCursorError = exports.MongoSystemError = exports.MongoServerSelectionError = exports.MongoServerError = exports.MongoServerClosedError = exports.MongoRuntimeError = exports.MongoParseError = exports.MongoOIDCError = exports.MongoNotConnectedError = exports.MongoNetworkTimeoutError = exports.MongoNetworkError = exports.MongoMissingDependencyError = exports.MongoMissingCredentialsError = exports.MongoKerberosError = exports.MongoInvalidArgumentError = exports.MongoGridFSStreamError = exports.MongoGridFSChunkError = exports.MongoGCPError = exports.MongoExpiredSessionError = exports.MongoError = exports.MongoDriverError = exports.MongoDecompressionError = exports.MongoCursorInUseError = exports.MongoCursorExhaustedError = exports.MongoCompatibilityError = exports.MongoChangeStreamError = exports.MongoBatchReExecutionError = exports.MongoAzureError = exports.MongoAWSError = exports.MongoAPIError = exports.ChangeStreamCursor = exports.ClientEncryption = exports.MongoBulkWriteError = exports.UUID = exports.Timestamp = exports.ObjectId = exports.MinKey = exports.MaxKey = exports.Long = exports.Int32 = exports.Double = exports.Decimal128 = exports.DBRef = exports.Code = exports.BSONType = exports.BSONSymbol = exports.BSONRegExp = exports.Binary = exports.BSON = void 0;
exports.ConnectionPoolCreatedEvent = exports.ConnectionPoolClosedEvent = exports.ConnectionPoolClearedEvent = exports.ConnectionCreatedEvent = exports.ConnectionClosedEvent = exports.ConnectionCheckOutStartedEvent = exports.ConnectionCheckOutFailedEvent = exports.ConnectionCheckedOutEvent = exports.ConnectionCheckedInEvent = exports.CommandSucceededEvent = exports.CommandStartedEvent = exports.CommandFailedEvent = exports.WriteConcern = exports.ReadPreference = exports.ReadConcern = exports.TopologyType = exports.ServerType = exports.ReadPreferenceMode = exports.ReadConcernLevel = exports.ProfilingLevel = exports.ReturnDocument = exports.ServerApiVersion = exports.ExplainVerbosity = exports.MongoErrorLabel = exports.CURSOR_FLAGS = exports.Compressor = exports.AuthMechanism = exports.GSSAPICanonicalizationValue = exports.AutoEncryptionLoggerLevel = exports.BatchType = exports.UnorderedBulkOperation = exports.OrderedBulkOperation = exports.MongoClient = exports.ListIndexesCursor = exports.ListCollectionsCursor = exports.GridFSBucketWriteStream = exports.GridFSBucketReadStream = exports.GridFSBucket = exports.FindCursor = exports.Db = exports.Collection = exports.ClientSession = exports.ChangeStream = exports.CancellationToken = exports.AggregationCursor = exports.Admin = exports.AbstractCursor = exports.MongoWriteConcernError = exports.MongoUnexpectedServerResponseError = exports.MongoTransactionError = void 0;
exports.MongoClientAuthProviders = exports.MongoCryptKMSRequestNetworkTimeoutError = exports.MongoCryptInvalidArgumentError = exports.MongoCryptError = exports.MongoCryptCreateEncryptedCollectionError = exports.MongoCryptCreateDataKeyError = exports.MongoCryptAzureKMSRequestError = exports.SrvPollingEvent = exports.WaitingForSuitableServerEvent = exports.ServerSelectionSucceededEvent = exports.ServerSelectionStartedEvent = exports.ServerSelectionFailedEvent = exports.ServerSelectionEvent = exports.TopologyOpeningEvent = exports.TopologyDescriptionChangedEvent = exports.TopologyClosedEvent = exports.ServerOpeningEvent = exports.ServerHeartbeatSucceededEvent = exports.ServerHeartbeatStartedEvent = exports.ServerHeartbeatFailedEvent = exports.ServerDescriptionChangedEvent = exports.ServerClosedEvent = exports.ConnectionReadyEvent = exports.ConnectionPoolReadyEvent = exports.ConnectionPoolMonitoringEvent = void 0;
const admin_1 = require("./admin");

@@ -78,2 +78,3 @@ Object.defineProperty(exports, "Admin", { enumerable: true, get: function () { return admin_1.Admin; } });

Object.defineProperty(exports, "MongoExpiredSessionError", { enumerable: true, get: function () { return error_1.MongoExpiredSessionError; } });
Object.defineProperty(exports, "MongoGCPError", { enumerable: true, get: function () { return error_1.MongoGCPError; } });
Object.defineProperty(exports, "MongoGridFSChunkError", { enumerable: true, get: function () { return error_1.MongoGridFSChunkError; } });

@@ -88,2 +89,3 @@ Object.defineProperty(exports, "MongoGridFSStreamError", { enumerable: true, get: function () { return error_1.MongoGridFSStreamError; } });

Object.defineProperty(exports, "MongoNotConnectedError", { enumerable: true, get: function () { return error_1.MongoNotConnectedError; } });
Object.defineProperty(exports, "MongoOIDCError", { enumerable: true, get: function () { return error_1.MongoOIDCError; } });
Object.defineProperty(exports, "MongoParseError", { enumerable: true, get: function () { return error_1.MongoParseError; } });

@@ -90,0 +92,0 @@ Object.defineProperty(exports, "MongoRuntimeError", { enumerable: true, get: function () { return error_1.MongoRuntimeError; } });

@@ -8,2 +8,5 @@ "use strict";

const mongodb_oidc_1 = require("./cmap/auth/mongodb_oidc");
const automated_callback_workflow_1 = require("./cmap/auth/mongodb_oidc/automated_callback_workflow");
const human_callback_workflow_1 = require("./cmap/auth/mongodb_oidc/human_callback_workflow");
const token_cache_1 = require("./cmap/auth/mongodb_oidc/token_cache");
const plain_1 = require("./cmap/auth/plain");

@@ -19,3 +22,3 @@ const providers_1 = require("./cmap/auth/providers");

[providers_1.AuthMechanism.MONGODB_GSSAPI, () => new gssapi_1.GSSAPI()],
[providers_1.AuthMechanism.MONGODB_OIDC, () => new mongodb_oidc_1.MongoDBOIDC()],
[providers_1.AuthMechanism.MONGODB_OIDC, (workflow) => new mongodb_oidc_1.MongoDBOIDC(workflow)],
[providers_1.AuthMechanism.MONGODB_PLAIN, () => new plain_1.Plain()],

@@ -39,2 +42,3 @@ [providers_1.AuthMechanism.MONGODB_SCRAM_SHA1, () => new scram_1.ScramSHA1()],

* @param name - The name of the provider to get or create.
* @param credentials - The credentials.
* @returns The provider.

@@ -44,3 +48,3 @@ * @throws MongoInvalidArgumentError if the mechanism is not supported.

*/
getOrCreateProvider(name) {
getOrCreateProvider(name, authMechanismProperties) {
const authProvider = this.existingProviders.get(name);

@@ -50,11 +54,37 @@ if (authProvider) {

}
const provider = AUTH_PROVIDERS.get(name)?.();
if (!provider) {
const providerFunction = AUTH_PROVIDERS.get(name);
if (!providerFunction) {
throw new error_1.MongoInvalidArgumentError(`authMechanism ${name} not supported`);
}
let provider;
if (name === providers_1.AuthMechanism.MONGODB_OIDC) {
provider = providerFunction(this.getWorkflow(authMechanismProperties));
}
else {
provider = providerFunction();
}
this.existingProviders.set(name, provider);
return provider;
}
/**
* Gets either a device workflow or callback workflow.
*/
getWorkflow(authMechanismProperties) {
if (authMechanismProperties.OIDC_HUMAN_CALLBACK) {
return new human_callback_workflow_1.HumanCallbackWorkflow(new token_cache_1.TokenCache(), authMechanismProperties.OIDC_HUMAN_CALLBACK);
}
else if (authMechanismProperties.OIDC_CALLBACK) {
return new automated_callback_workflow_1.AutomatedCallbackWorkflow(new token_cache_1.TokenCache(), authMechanismProperties.OIDC_CALLBACK);
}
else {
const environment = authMechanismProperties.ENVIRONMENT;
const workflow = mongodb_oidc_1.OIDC_WORKFLOWS.get(environment)?.();
if (!workflow) {
throw new error_1.MongoInvalidArgumentError(`Could not load workflow for environment ${authMechanismProperties.ENVIRONMENT}`);
}
return workflow;
}
}
}
exports.MongoClientAuthProviders = MongoClientAuthProviders;
//# sourceMappingURL=mongo_client_auth_providers.js.map

@@ -186,3 +186,3 @@ "use strict";

const allowedHosts = options.credentials?.mechanismProperties?.ALLOWED_HOSTS || mongo_credentials_1.DEFAULT_ALLOWED_HOSTS;
const isServiceAuth = !!options.credentials?.mechanismProperties?.PROVIDER_NAME;
const isServiceAuth = !!options.credentials?.mechanismProperties?.ENVIRONMENT;
if (!isServiceAuth) {

@@ -189,0 +189,0 @@ for (const host of options.hosts) {

@@ -183,6 +183,12 @@ "use strict";

// TODO(NODE-2674): Preserve int64 sent from MongoDB
const currentCounter = bson_1.Long.isLong(currentTv.counter)
? currentTv.counter
: bson_1.Long.fromNumber(currentTv.counter);
const newCounter = bson_1.Long.isLong(newTv.counter) ? newTv.counter : bson_1.Long.fromNumber(newTv.counter);
const currentCounter = typeof currentTv.counter === 'bigint'
? bson_1.Long.fromBigInt(currentTv.counter)
: bson_1.Long.isLong(currentTv.counter)
? currentTv.counter
: bson_1.Long.fromNumber(currentTv.counter);
const newCounter = typeof newTv.counter === 'bigint'
? bson_1.Long.fromBigInt(newTv.counter)
: bson_1.Long.isLong(newTv.counter)
? newTv.counter
: bson_1.Long.fromNumber(newTv.counter);
return currentCounter.compare(newCounter);

@@ -189,0 +195,0 @@ }

@@ -232,2 +232,7 @@ "use strict";

*
* @remarks
* **IMPORTANT**: Running operations in parallel is not supported during a transaction. The use of `Promise.all`,
* `Promise.allSettled`, `Promise.race`, etc to parallelize operations inside a transaction is
* undefined behaviour.
*
* @param options - Options for the transaction

@@ -291,2 +296,7 @@ */

*
* **IMPORTANT:** Running operations in parallel is not supported during a transaction. The use of `Promise.all`,
* `Promise.allSettled`, `Promise.race`, etc to parallelize operations inside a transaction is
* undefined behaviour.
*
*
* @remarks

@@ -293,0 +303,0 @@ * - If all operations successfully complete and the `commitTransaction` operation is successful, then the provided function will return the result of the provided function.
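The remarks added above warn that parallelizing operations with Promise.all (or allSettled/race) inside a transaction is undefined behaviour. A hedged sketch of the recommended shape, awaiting each operation sequentially on the same session; the URI and namespace are placeholders:

import { MongoClient } from 'mongodb';

const client = new MongoClient('mongodb://localhost:27017'); // placeholder URI
const accounts = client.db('bank').collection('accounts');   // placeholder namespace
const session = client.startSession();
try {
  await session.withTransaction(async () => {
    // Operations run one at a time, all bound to the same session;
    // Promise.all here would be undefined behaviour per the remark above.
    await accounts.updateOne({ _id: 'a' }, { $inc: { balance: -10 } }, { session });
    await accounts.updateOne({ _id: 'b' }, { $inc: { balance: 10 } }, { session });
  });
} finally {
  await session.endSession();
  await client.close();
}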

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.DOCUMENT_DB_MSG = exports.COSMOS_DB_CHECK = exports.DOCUMENT_DB_CHECK = exports.request = exports.matchesParentDomain = exports.parseUnsignedInteger = exports.parseInteger = exports.compareObjectId = exports.commandSupportsReadConcern = exports.shuffle = exports.supportsRetryableWrites = exports.enumToString = exports.emitWarningOnce = exports.emitWarning = exports.MONGODB_WARNING_CODE = exports.DEFAULT_PK_FACTORY = exports.HostAddress = exports.BufferPool = exports.List = exports.deepCopy = exports.isRecord = exports.setDifference = exports.isHello = exports.isSuperset = exports.resolveOptions = exports.hasAtomicOperators = exports.calculateDurationInMs = exports.now = exports.makeStateMachine = exports.errorStrictEqual = exports.arrayStrictEqual = exports.maxWireVersion = exports.uuidV4 = exports.makeCounter = exports.MongoDBCollectionNamespace = exports.MongoDBNamespace = exports.ns = exports.getTopology = exports.decorateWithExplain = exports.decorateWithReadConcern = exports.decorateWithCollation = exports.isPromiseLike = exports.applyRetryableWrites = exports.filterOptions = exports.mergeOptions = exports.isObject = exports.normalizeHintField = exports.hostMatchesWildcards = exports.isUint8Array = exports.ByteUtils = void 0;
exports.noop = exports.fileIsAccessible = exports.maybeAddIdToDocuments = exports.once = exports.randomBytes = exports.squashError = exports.promiseWithResolvers = exports.isHostMatch = exports.COSMOS_DB_MSG = void 0;
exports.COSMOS_DB_CHECK = exports.DOCUMENT_DB_CHECK = exports.request = exports.get = exports.matchesParentDomain = exports.parseUnsignedInteger = exports.parseInteger = exports.compareObjectId = exports.commandSupportsReadConcern = exports.shuffle = exports.supportsRetryableWrites = exports.enumToString = exports.emitWarningOnce = exports.emitWarning = exports.MONGODB_WARNING_CODE = exports.DEFAULT_PK_FACTORY = exports.HostAddress = exports.BufferPool = exports.List = exports.deepCopy = exports.isRecord = exports.setDifference = exports.isHello = exports.isSuperset = exports.resolveOptions = exports.hasAtomicOperators = exports.calculateDurationInMs = exports.now = exports.makeStateMachine = exports.errorStrictEqual = exports.arrayStrictEqual = exports.maxWireVersion = exports.uuidV4 = exports.makeCounter = exports.MongoDBCollectionNamespace = exports.MongoDBNamespace = exports.ns = exports.getTopology = exports.decorateWithExplain = exports.decorateWithReadConcern = exports.decorateWithCollation = exports.isPromiseLike = exports.applyRetryableWrites = exports.filterOptions = exports.mergeOptions = exports.isObject = exports.normalizeHintField = exports.hostMatchesWildcards = exports.isUint8Array = exports.ByteUtils = void 0;
exports.noop = exports.fileIsAccessible = exports.maybeAddIdToDocuments = exports.once = exports.randomBytes = exports.squashError = exports.promiseWithResolvers = exports.isHostMatch = exports.COSMOS_DB_MSG = exports.DOCUMENT_DB_MSG = void 0;
const crypto = require("crypto");
const fs_1 = require("fs");
const http = require("http");
const timers_1 = require("timers");
const url = require("url");

@@ -940,2 +941,31 @@ const url_1 = require("url");

exports.matchesParentDomain = matchesParentDomain;
/**
* Perform a get request that returns status and body.
* @internal
*/
function get(url, options = {}) {
return new Promise((resolve, reject) => {
/* eslint-disable prefer-const */
let timeoutId;
const request = http
.get(url, options, response => {
response.setEncoding('utf8');
let body = '';
response.on('data', chunk => (body += chunk));
response.on('end', () => {
(0, timers_1.clearTimeout)(timeoutId);
resolve({ status: response.statusCode, body });
});
})
.on('error', error => {
(0, timers_1.clearTimeout)(timeoutId);
reject(error);
})
.end();
timeoutId = (0, timers_1.setTimeout)(() => {
request.destroy(new error_1.MongoNetworkTimeoutError(`request timed out after 10 seconds`));
}, 10000);
});
}
exports.get = get;
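// Illustrative usage (not part of the module): the OIDC machine workflows call this
// helper roughly as follows. The endpoint, headers, and response field below are
// assumptions for the sketch, not taken from this diff.
//
//   const { status, body } = await get(
//     new URL('http://169.254.169.254/metadata/identity/oauth2/token?api-version=2018-02-01&resource=<encoded-resource>'),
//     { headers: { Metadata: true, Accept: 'application/json' } }
//   );
//   if (status !== 200) throw new Error(`unexpected status ${status}`);
//   const accessToken = JSON.parse(body).access_token; // assumed token field in the response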
async function request(uri, options = {}) {

@@ -942,0 +972,0 @@ return await new Promise((resolve, reject) => {

{
"name": "mongodb",
"version": "6.6.2",
"version": "6.7.0-dev.20240530.sha.f56938f",
"description": "The official MongoDB driver for Node.js",

@@ -71,3 +71,2 @@ "main": "lib/index.js",

"@mongodb-js/zstd": "^1.2.0",
"@octokit/core": "^6.1.2",
"@types/chai": "^4.3.14",

@@ -153,4 +152,6 @@ "@types/chai-subset": "^1.3.5",

"check:aws": "nyc mocha --config test/mocha_mongodb.json test/integration/auth/mongodb_aws.test.ts",
"check:oidc": "mocha --config test/mocha_mongodb.json test/manual/mongodb_oidc.prose.test.ts",
"check:oidc-azure": "mocha --config test/mocha_mongodb.json test/integration/auth/mongodb_oidc_azure.prose.test.ts",
"check:oidc-auth": "mocha --config test/mocha_mongodb.json test/integration/auth/auth.spec.test.ts",
"check:oidc-test": "mocha --config test/mocha_mongodb.json test/integration/auth/mongodb_oidc.prose.test.ts",
"check:oidc-azure": "mocha --config test/mocha_mongodb.json test/integration/auth/mongodb_oidc_azure.prose.05.test.ts",
"check:oidc-gcp": "mocha --config test/mocha_mongodb.json test/integration/auth/mongodb_oidc_gcp.prose.06.test.ts",
"check:ocsp": "mocha --config test/manual/mocharc.json test/manual/ocsp_support.test.js",

@@ -180,2 +181,2 @@ "check:kerberos": "nyc mocha --config test/manual/mocharc.json test/manual/kerberos.test.ts",

}
}
}
import { type Document } from '../../bson';
import { MongoCryptAzureKMSRequestError, MongoCryptKMSRequestNetworkTimeoutError } from '../errors';
import { MongoNetworkTimeoutError } from '../../error';
import { get } from '../../utils';
import { MongoCryptAzureKMSRequestError } from '../errors';
import { type KMSProviders } from './index';
import { get } from './utils';
const MINIMUM_TOKEN_REFRESH_IN_MILLISECONDS = 6000;
/** Base URL for getting Azure tokens. */
export const AZURE_BASE_URL = 'http://169.254.169.254/metadata/identity/oauth2/token?';

@@ -118,2 +121,15 @@ /**

* @internal
* Get the Azure endpoint URL.
*/
export function addAzureParams(url: URL, resource: string, username?: string): URL {
url.searchParams.append('api-version', '2018-02-01');
url.searchParams.append('resource', resource);
if (username) {
url.searchParams.append('client_id', username);
}
return url;
}
/**
* @internal
*

@@ -127,9 +143,4 @@ * parses any options provided by prose tests to `fetchAzureKMSToken` and merges them with

} {
const url = new URL(
options.url?.toString() ?? 'http://169.254.169.254/metadata/identity/oauth2/token'
);
url.searchParams.append('api-version', '2018-02-01');
url.searchParams.append('resource', 'https://vault.azure.net');
const url = new URL(options.url?.toString() ?? AZURE_BASE_URL);
addAzureParams(url, 'https://vault.azure.net');
const headers = { ...options.headers, 'Content-Type': 'application/json', Metadata: true };

@@ -157,3 +168,3 @@ return { headers, url };

} catch (error) {
if (error instanceof MongoCryptKMSRequestNetworkTimeoutError) {
if (error instanceof MongoNetworkTimeoutError) {
throw new MongoCryptAzureKMSRequestError(`[Azure KMS] ${error.message}`);

@@ -160,0 +171,0 @@ }

@@ -6,3 +6,2 @@ // Resolves the default auth mechanism according to

MongoAPIError,
MongoAzureError,
MongoInvalidArgumentError,

@@ -12,3 +11,3 @@ MongoMissingCredentialsError

import { GSSAPICanonicalizationValue } from './gssapi';
import type { OIDCRefreshFunction, OIDCRequestFunction } from './mongodb_oidc';
import type { OIDCCallbackFunction } from './mongodb_oidc';
import { AUTH_MECHS_AUTH_SRC_EXTERNAL, AuthMechanism } from './providers';

@@ -37,3 +36,7 @@

const ALLOWED_PROVIDER_NAMES: AuthMechanismProperties['PROVIDER_NAME'][] = ['aws', 'azure'];
const ALLOWED_ENVIRONMENT_NAMES: AuthMechanismProperties['ENVIRONMENT'][] = [
'test',
'azure',
'gcp'
];
const ALLOWED_HOSTS_ERROR = 'Auth mechanism property ALLOWED_HOSTS must be an array of strings.';

@@ -44,2 +47,3 @@

'*.mongodb.net',
'*.mongodb-qa.net',
'*.mongodb-dev.net',

@@ -53,4 +57,4 @@ '*.mongodbgov.net',

/** Error for when the token audience is missing in the environment. */
const TOKEN_AUDIENCE_MISSING_ERROR =
'TOKEN_AUDIENCE must be set in the auth mechanism properties when PROVIDER_NAME is azure.';
const TOKEN_RESOURCE_MISSING_ERROR =
'TOKEN_RESOURCE must be set in the auth mechanism properties when ENVIRONMENT is azure or gcp.';

@@ -64,12 +68,12 @@ /** @public */

AWS_SESSION_TOKEN?: string;
/** @experimental */
REQUEST_TOKEN_CALLBACK?: OIDCRequestFunction;
/** @experimental */
REFRESH_TOKEN_CALLBACK?: OIDCRefreshFunction;
/** @experimental */
PROVIDER_NAME?: 'aws' | 'azure';
/** @experimental */
/** A user provided OIDC machine callback function. */
OIDC_CALLBACK?: OIDCCallbackFunction;
/** A user provided OIDC human interacted callback function. */
OIDC_HUMAN_CALLBACK?: OIDCCallbackFunction;
/** The OIDC environment. Note that 'test' is for internal use only. */
ENVIRONMENT?: 'test' | 'azure' | 'gcp';
/** Allowed hosts that OIDC auth can connect to. */
ALLOWED_HOSTS?: string[];
/** @experimental */
TOKEN_AUDIENCE?: string;
/** The resource token for OIDC auth in Azure and GCP. */
TOKEN_RESOURCE?: string;
}

@@ -188,21 +192,32 @@

if (this.mechanism === AuthMechanism.MONGODB_OIDC) {
if (this.username && this.mechanismProperties.PROVIDER_NAME) {
if (
this.username &&
this.mechanismProperties.ENVIRONMENT &&
this.mechanismProperties.ENVIRONMENT !== 'azure'
) {
throw new MongoInvalidArgumentError(
`username and PROVIDER_NAME may not be used together for mechanism '${this.mechanism}'.`
`username and ENVIRONMENT '${this.mechanismProperties.ENVIRONMENT}' may not be used together for mechanism '${this.mechanism}'.`
);
}
if (this.username && this.password) {
throw new MongoInvalidArgumentError(
`No password is allowed in ENVIRONMENT '${this.mechanismProperties.ENVIRONMENT}' for '${this.mechanism}'.`
);
}
if (
this.mechanismProperties.PROVIDER_NAME === 'azure' &&
!this.mechanismProperties.TOKEN_AUDIENCE
(this.mechanismProperties.ENVIRONMENT === 'azure' ||
this.mechanismProperties.ENVIRONMENT === 'gcp') &&
!this.mechanismProperties.TOKEN_RESOURCE
) {
throw new MongoAzureError(TOKEN_AUDIENCE_MISSING_ERROR);
throw new MongoInvalidArgumentError(TOKEN_RESOURCE_MISSING_ERROR);
}
if (
this.mechanismProperties.PROVIDER_NAME &&
!ALLOWED_PROVIDER_NAMES.includes(this.mechanismProperties.PROVIDER_NAME)
this.mechanismProperties.ENVIRONMENT &&
!ALLOWED_ENVIRONMENT_NAMES.includes(this.mechanismProperties.ENVIRONMENT)
) {
throw new MongoInvalidArgumentError(
`Currently only a PROVIDER_NAME in ${ALLOWED_PROVIDER_NAMES.join(
`Currently only a ENVIRONMENT in ${ALLOWED_ENVIRONMENT_NAMES.join(
','

@@ -214,19 +229,11 @@ )} is supported for mechanism '${this.mechanism}'.`

if (
this.mechanismProperties.REFRESH_TOKEN_CALLBACK &&
!this.mechanismProperties.REQUEST_TOKEN_CALLBACK
!this.mechanismProperties.ENVIRONMENT &&
!this.mechanismProperties.OIDC_CALLBACK &&
!this.mechanismProperties.OIDC_HUMAN_CALLBACK
) {
throw new MongoInvalidArgumentError(
`A REQUEST_TOKEN_CALLBACK must be provided when using a REFRESH_TOKEN_CALLBACK for mechanism '${this.mechanism}'`
`Either a ENVIRONMENT, OIDC_CALLBACK, or OIDC_HUMAN_CALLBACK must be specified for mechanism '${this.mechanism}'.`
);
}
if (
!this.mechanismProperties.PROVIDER_NAME &&
!this.mechanismProperties.REQUEST_TOKEN_CALLBACK
) {
throw new MongoInvalidArgumentError(
`Either a PROVIDER_NAME or a REQUEST_TOKEN_CALLBACK must be specified for mechanism '${this.mechanism}'.`
);
}
if (this.mechanismProperties.ALLOWED_HOSTS) {

@@ -233,0 +240,0 @@ const hosts = this.mechanismProperties.ALLOWED_HOSTS;

@@ -8,5 +8,6 @@ import type { Document } from 'bson';

import type { MongoCredentials } from './mongo_credentials';
import { AwsServiceWorkflow } from './mongodb_oidc/aws_service_workflow';
import { AzureServiceWorkflow } from './mongodb_oidc/azure_service_workflow';
import { CallbackWorkflow } from './mongodb_oidc/callback_workflow';
import { AzureMachineWorkflow } from './mongodb_oidc/azure_machine_workflow';
import { GCPMachineWorkflow } from './mongodb_oidc/gcp_machine_workflow';
import { TokenCache } from './mongodb_oidc/token_cache';
import { TokenMachineWorkflow } from './mongodb_oidc/token_machine_workflow';

@@ -17,8 +18,15 @@ /** Error when credentials are missing. */

/**
* The information returned by the server on the IDP server.
* @public
* @experimental
*/
export interface IdPServerInfo {
export interface IdPInfo {
/**
* A URL which describes the Authentication Server. This identifier should
* be the iss of provided access tokens, and be viable for RFC8414 metadata
* discovery and RFC9207 identification.
*/
issuer: string;
/** A unique client ID for this OIDC client. */
clientId: string;
/** A list of additional scopes to request from IdP. */
requestScopes?: string[];

@@ -28,8 +36,12 @@ }

/**
* The response from the IdP server with the access token and
* optional expiration time and refresh token.
* @public
* @experimental
*/
export interface IdPServerResponse {
/** The OIDC access token. */
accessToken: string;
/** The time when the access token expires. For future use. */
expiresInSeconds?: number;
/** The refresh token, if applicable, to be used by the callback to request a new token from the issuer. */
refreshToken?: string;

@@ -39,32 +51,49 @@ }

/**
* The response required to be returned from the machine or
* human callback workflows' callback.
* @public
* @experimental
*/
export interface OIDCCallbackContext {
export interface OIDCResponse {
/** The OIDC access token. */
accessToken: string;
/** The time when the access token expires. For future use. */
expiresInSeconds?: number;
/** The refresh token, if applicable, to be used by the callback to request a new token from the issuer. */
refreshToken?: string;
timeoutSeconds?: number;
timeoutContext?: AbortSignal;
version: number;
}
/**
* The parameters that the driver provides to the user supplied
* human or machine callback.
*
* The version number is used to communicate callback API changes that are not breaking but that
* users may want to know about and review their implementation. Users may wish to check the version
* number and throw an error if their expected version number and the one provided do not match.
* @public
* @experimental
*/
export type OIDCRequestFunction = (
info: IdPServerInfo,
context: OIDCCallbackContext
) => Promise<IdPServerResponse>;
export interface OIDCCallbackParams {
/** Optional username. */
username?: string;
/** The context in which to timeout the OIDC callback. */
timeoutContext: AbortSignal;
/** The current OIDC API version. */
version: 1;
/** The IdP information returned from the server. */
idpInfo?: IdPInfo;
/** The refresh token, if applicable, to be used by the callback to request a new token from the issuer. */
refreshToken?: string;
}
/**
* The signature of the human or machine callback functions.
* @public
* @experimental
*/
export type OIDCRefreshFunction = (
info: IdPServerInfo,
context: OIDCCallbackContext
) => Promise<IdPServerResponse>;
export type OIDCCallbackFunction = (params: OIDCCallbackParams) => Promise<OIDCResponse>;
type ProviderName = 'aws' | 'azure' | 'callback';
/** The current version of OIDC implementation. */
export const OIDC_VERSION = 1;
type EnvironmentName = 'test' | 'azure' | 'gcp' | undefined;
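Taken together, OIDCCallbackParams, OIDCResponse, and OIDCCallbackFunction above replace the old request/refresh callback pair. A hedged sketch of a machine callback that checks the version number (as the comment above suggests users may) and is wired in through OIDC_CALLBACK; it assumes these types are re-exported from the driver's entry point, as their @public tags indicate, and the token path is a placeholder:

import { MongoClient, type OIDCCallbackParams, type OIDCResponse } from 'mongodb';
import { readFile } from 'fs/promises';

const machineCallback = async (params: OIDCCallbackParams): Promise<OIDCResponse> => {
  // Guard against future callback API revisions.
  if (params.version !== 1) {
    throw new Error(`Unsupported OIDC callback version: ${params.version}`);
  }
  // Placeholder token source: a token issued out of band for this workload.
  const accessToken = (await readFile('/var/run/secrets/oidc/token', 'utf8')).trim();
  return { accessToken, expiresInSeconds: 300 };
};

const client = new MongoClient('mongodb+srv://cluster0.example.mongodb.net', {
  authMechanism: 'MONGODB-OIDC',
  authMechanismProperties: { OIDC_CALLBACK: machineCallback }
});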
/** @internal */
export interface Workflow {

@@ -78,28 +107,37 @@ /**

credentials: MongoCredentials,
reauthenticating: boolean,
response?: Document
): Promise<Document>;
): Promise<void>;
/**
* Each workflow should specify the correct custom behaviour for reauthentication.
*/
reauthenticate(connection: Connection, credentials: MongoCredentials): Promise<void>;
/**
* Get the document to add for speculative authentication.
*/
speculativeAuth(credentials: MongoCredentials): Promise<Document>;
speculativeAuth(connection: Connection, credentials: MongoCredentials): Promise<Document>;
}
/** @internal */
export const OIDC_WORKFLOWS: Map<ProviderName, Workflow> = new Map();
OIDC_WORKFLOWS.set('callback', new CallbackWorkflow());
OIDC_WORKFLOWS.set('aws', new AwsServiceWorkflow());
OIDC_WORKFLOWS.set('azure', new AzureServiceWorkflow());
export const OIDC_WORKFLOWS: Map<EnvironmentName, () => Workflow> = new Map();
OIDC_WORKFLOWS.set('test', () => new TokenMachineWorkflow(new TokenCache()));
OIDC_WORKFLOWS.set('azure', () => new AzureMachineWorkflow(new TokenCache()));
OIDC_WORKFLOWS.set('gcp', () => new GCPMachineWorkflow(new TokenCache()));
/**
* OIDC auth provider.
* @experimental
*/
export class MongoDBOIDC extends AuthProvider {
workflow: Workflow;
/**
* Instantiate the auth provider.
*/
constructor() {
constructor(workflow?: Workflow) {
super();
if (!workflow) {
throw new MongoInvalidArgumentError('No workflow provided to the OIDC auth provider.');
}
this.workflow = workflow;
}

@@ -112,5 +150,11 @@

const { connection, reauthenticating, response } = authContext;
if (response?.speculativeAuthenticate?.done) {
return;
}
const credentials = getCredentials(authContext);
const workflow = getWorkflow(credentials);
await workflow.execute(connection, credentials, reauthenticating, response);
if (reauthenticating) {
await this.workflow.reauthenticate(connection, credentials);
} else {
await this.workflow.execute(connection, credentials, response);
}
}

@@ -125,5 +169,5 @@

): Promise<HandshakeDocument> {
const { connection } = authContext;
const credentials = getCredentials(authContext);
const workflow = getWorkflow(credentials);
const result = await workflow.speculativeAuth(credentials);
const result = await this.workflow.speculativeAuth(connection, credentials);
return { ...handshakeDoc, ...result };

@@ -143,15 +187,1 @@ }

}
/**
* Gets either a device workflow or callback workflow.
*/
function getWorkflow(credentials: MongoCredentials): Workflow {
const providerName = credentials.mechanismProperties.PROVIDER_NAME;
const workflow = OIDC_WORKFLOWS.get(providerName || 'callback');
if (!workflow) {
throw new MongoInvalidArgumentError(
`Could not load workflow for provider ${credentials.mechanismProperties.PROVIDER_NAME}`
);
}
return workflow;
}

@@ -1,25 +0,22 @@

import { Binary, BSON, type Document } from 'bson';
import { type Document } from 'bson';
import { setTimeout } from 'timers/promises';
import { MONGODB_ERROR_CODES, MongoError, MongoMissingCredentialsError } from '../../../error';
import { MongoMissingCredentialsError } from '../../../error';
import { ns } from '../../../utils';
import type { Connection } from '../../connection';
import type { MongoCredentials } from '../mongo_credentials';
import type {
IdPServerInfo,
IdPServerResponse,
OIDCCallbackContext,
OIDCRefreshFunction,
OIDCRequestFunction,
Workflow
import {
type OIDCCallbackFunction,
type OIDCCallbackParams,
type OIDCResponse,
type Workflow
} from '../mongodb_oidc';
import { AuthMechanism } from '../providers';
import { CallbackLockCache } from './callback_lock_cache';
import { TokenEntryCache } from './token_entry_cache';
import { finishCommandDocument, startCommandDocument } from './command_builders';
import { type TokenCache } from './token_cache';
/** The current version of OIDC implementation. */
const OIDC_VERSION = 0;
/** 5 minutes in milliseconds */
export const HUMAN_TIMEOUT_MS = 300000;
/** 1 minute in milliseconds */
export const AUTOMATED_TIMEOUT_MS = 60000;
/** 5 minutes in seconds */
const TIMEOUT_S = 300;
/** Properties allowed on results of callbacks. */

@@ -32,2 +29,5 @@ const RESULT_PROPERTIES = ['accessToken', 'expiresInSeconds', 'refreshToken'];

/** The time to throttle callback calls. */
const THROTTLE_MS = 100;
/**

@@ -37,12 +37,14 @@ * OIDC implementation of a callback based workflow.

*/
export class CallbackWorkflow implements Workflow {
cache: TokenEntryCache;
callbackCache: CallbackLockCache;
export abstract class CallbackWorkflow implements Workflow {
cache: TokenCache;
callback: OIDCCallbackFunction;
lastExecutionTime: number;
/**
* Instantiate the workflow
* Instantiate the callback workflow.
*/
constructor() {
this.cache = new TokenEntryCache();
this.callbackCache = new CallbackLockCache();
constructor(cache: TokenCache, callback: OIDCCallbackFunction) {
this.cache = cache;
this.callback = this.withLock(callback);
this.lastExecutionTime = Date.now() - THROTTLE_MS;
}

@@ -54,102 +56,49 @@

*/
async speculativeAuth(credentials: MongoCredentials): Promise<Document> {
const document = startCommandDocument(credentials);
document.db = credentials.source;
return { speculativeAuthenticate: document };
async speculativeAuth(connection: Connection, credentials: MongoCredentials): Promise<Document> {
// Check if the Client Cache has an access token.
// If it does, cache the access token in the Connection Cache and send a JwtStepRequest
// with the cached access token in the speculative authentication SASL payload.
if (this.cache.hasAccessToken) {
const accessToken = this.cache.getAccessToken();
connection.accessToken = accessToken;
const document = finishCommandDocument(accessToken);
document.db = credentials.source;
return { speculativeAuthenticate: document };
}
return {};
}
/**
* Execute the OIDC callback workflow.
 * Reauthenticate the callback workflow. For this we invalidate the access token
 * in the cache and run the authentication steps again. No initial handshake needs
 * to be sent.
*/
async execute(
connection: Connection,
credentials: MongoCredentials,
reauthenticating: boolean,
response?: Document
): Promise<Document> {
// Get the callbacks with locks from the callback lock cache.
const { requestCallback, refreshCallback, callbackHash } = this.callbackCache.getEntry(
connection,
credentials
);
// Look for an existing entry in the cache.
const entry = this.cache.getEntry(connection.address, credentials.username, callbackHash);
let result;
if (entry) {
// Reauthentication cannot use a token from the cache since the server has
// stated it is invalid by the request for reauthentication.
if (entry.isValid() && !reauthenticating) {
// Presence of a valid cache entry means we can skip to the finishing step.
result = await this.finishAuthentication(
connection,
credentials,
entry.tokenResult,
response?.speculativeAuthenticate?.conversationId
);
async reauthenticate(connection: Connection, credentials: MongoCredentials): Promise<void> {
if (this.cache.hasAccessToken) {
// Reauthentication implies the token has expired.
if (connection.accessToken === this.cache.getAccessToken()) {
// If connection's access token is the same as the cache's, remove
// the token from the cache and connection.
this.cache.removeAccessToken();
delete connection.accessToken;
} else {
// Presence of an expired cache entry means we must fetch a new one and
// then execute the final step.
const tokenResult = await this.fetchAccessToken(
connection,
credentials,
entry.serverInfo,
reauthenticating,
callbackHash,
requestCallback,
refreshCallback
);
try {
result = await this.finishAuthentication(
connection,
credentials,
tokenResult,
reauthenticating ? undefined : response?.speculativeAuthenticate?.conversationId
);
} catch (error) {
// If we are reauthenticating and this errors with reauthentication
// required, we need to do the entire process over again and clear
// the cache entry.
if (
reauthenticating &&
error instanceof MongoError &&
error.code === MONGODB_ERROR_CODES.Reauthenticate
) {
this.cache.deleteEntry(connection.address, credentials.username, callbackHash);
result = await this.execute(connection, credentials, reauthenticating);
} else {
throw error;
}
}
// If the connection's access token is different from the cache's, set
// the cache's token on the connection and do not remove from the
// cache.
connection.accessToken = this.cache.getAccessToken();
}
} else {
// No entry in the cache requires us to do all authentication steps
// from start to finish, including getting a fresh token for the cache.
const startDocument = await this.startAuthentication(
connection,
credentials,
reauthenticating,
response
);
const conversationId = startDocument.conversationId;
const serverResult = BSON.deserialize(startDocument.payload.buffer) as IdPServerInfo;
const tokenResult = await this.fetchAccessToken(
connection,
credentials,
serverResult,
reauthenticating,
callbackHash,
requestCallback,
refreshCallback
);
result = await this.finishAuthentication(
connection,
credentials,
tokenResult,
conversationId
);
}
return result;
await this.execute(connection, credentials);
}
/**
* Execute the OIDC callback workflow.
*/
abstract execute(
connection: Connection,
credentials: MongoCredentials,
response?: Document
): Promise<void>;
/**
* Starts the callback authentication process. If there is a speculative

@@ -159,10 +108,9 @@ * authentication document from the initial handshake, then we will use that

*/
private async startAuthentication(
protected async startAuthentication(
connection: Connection,
credentials: MongoCredentials,
reauthenticating: boolean,
response?: Document
): Promise<Document> {
let result;
if (!reauthenticating && response?.speculativeAuthenticate) {
if (response?.speculativeAuthenticate) {
result = response.speculativeAuthenticate;

@@ -182,93 +130,53 @@ } else {

*/
private async finishAuthentication(
protected async finishAuthentication(
connection: Connection,
credentials: MongoCredentials,
tokenResult: IdPServerResponse,
token: string,
conversationId?: number
): Promise<Document> {
const result = await connection.command(
): Promise<void> {
await connection.command(
ns(credentials.source),
finishCommandDocument(tokenResult.accessToken, conversationId),
finishCommandDocument(token, conversationId),
undefined
);
return result;
}
/**
* Fetches an access token using either the request or refresh callbacks and
* puts it in the cache.
* Executes the callback and validates the output.
*/
private async fetchAccessToken(
connection: Connection,
credentials: MongoCredentials,
serverInfo: IdPServerInfo,
reauthenticating: boolean,
callbackHash: string,
requestCallback: OIDCRequestFunction,
refreshCallback?: OIDCRefreshFunction
): Promise<IdPServerResponse> {
// Get the token from the cache.
const entry = this.cache.getEntry(connection.address, credentials.username, callbackHash);
let result;
const context: OIDCCallbackContext = { timeoutSeconds: TIMEOUT_S, version: OIDC_VERSION };
// Check if there's a token in the cache.
if (entry) {
// If the cache entry is valid, return the token result.
if (entry.isValid() && !reauthenticating) {
return entry.tokenResult;
}
// If the cache entry is not valid, remove it from the cache and first attempt
// to use the refresh callback to get a new token. If no refresh callback
// exists, then fallback to the request callback.
if (refreshCallback) {
context.refreshToken = entry.tokenResult.refreshToken;
result = await refreshCallback(serverInfo, context);
} else {
result = await requestCallback(serverInfo, context);
}
} else {
// With no token in the cache we use the request callback.
result = await requestCallback(serverInfo, context);
}
protected async executeAndValidateCallback(params: OIDCCallbackParams): Promise<OIDCResponse> {
const result = await this.callback(params);
// Validate that the result returned by the callback is acceptable. If it is not
// we must clear the token result from the cache.
if (isCallbackResultInvalid(result)) {
this.cache.deleteEntry(connection.address, credentials.username, callbackHash);
throw new MongoMissingCredentialsError(CALLBACK_RESULT_ERROR);
}
// Cleanup the cache.
this.cache.deleteExpiredEntries();
// Put the new entry into the cache.
this.cache.addEntry(
connection.address,
credentials.username || '',
callbackHash,
result,
serverInfo
);
return result;
}
}
/**
* Generate the finishing command document for authentication. Will be a
* saslStart or saslContinue depending on the presence of a conversation id.
*/
function finishCommandDocument(token: string, conversationId?: number): Document {
if (conversationId != null && typeof conversationId === 'number') {
return {
saslContinue: 1,
conversationId: conversationId,
payload: new Binary(BSON.serialize({ jwt: token }))
 /**
 * Ensure that only one callback executes at a time and throttle calls
 * to at most one every 100ms.
 */
protected withLock(callback: OIDCCallbackFunction): OIDCCallbackFunction {
let lock: Promise<any> = Promise.resolve();
return async (params: OIDCCallbackParams): Promise<OIDCResponse> => {
 // We await the previous lock first so that we never return the result of a
 // previous invocation; only the current callback's value is returned.
await lock;
lock = lock
// eslint-disable-next-line github/no-then
.catch(() => null)
// eslint-disable-next-line github/no-then
.then(async () => {
const difference = Date.now() - this.lastExecutionTime;
if (difference <= THROTTLE_MS) {
await setTimeout(THROTTLE_MS - difference, { signal: params.timeoutContext });
}
this.lastExecutionTime = Date.now();
return await callback(params);
});
return await lock;
};
}
 // saslContinue requires a conversationId in the command to be valid, so in this
 // case the server allows "step two" to actually be a saslStart with the token
 // as the jwt, since the use of the cached value has no correlating conversation
 // on the particular connection.
return {
saslStart: 1,
mechanism: AuthMechanism.MONGODB_OIDC,
payload: new Binary(BSON.serialize({ jwt: token }))
};
}

@@ -286,17 +194,1 @@

}
/**
* Generate the saslStart command document.
*/
function startCommandDocument(credentials: MongoCredentials): Document {
const payload: Document = {};
if (credentials.username) {
payload.n = credentials.username;
}
return {
saslStart: 1,
autoAuthorize: 1,
mechanism: AuthMechanism.MONGODB_OIDC,
payload: new Binary(BSON.serialize(payload))
};
}

@@ -11,3 +11,2 @@ /** @public */

MONGODB_X509: 'MONGODB-X509',
/** @experimental */
MONGODB_OIDC: 'MONGODB-OIDC'

@@ -14,0 +13,0 @@ } as const);

@@ -94,3 +94,6 @@ import type { Socket, SocketConnectOpts } from 'net';

!(credentials.mechanism === AuthMechanism.MONGODB_DEFAULT) &&
!options.authProviders.getOrCreateProvider(credentials.mechanism)
!options.authProviders.getOrCreateProvider(
credentials.mechanism,
credentials.mechanismProperties
)
) {

@@ -150,3 +153,6 @@ throw new MongoInvalidArgumentError(`AuthMechanism '${credentials.mechanism}' not supported`);

const resolvedCredentials = credentials.resolveAuthMechanism(response);
const provider = options.authProviders.getOrCreateProvider(resolvedCredentials.mechanism);
const provider = options.authProviders.getOrCreateProvider(
resolvedCredentials.mechanism,
resolvedCredentials.mechanismProperties
);
if (!provider) {

@@ -223,3 +229,4 @@ throw new MongoInvalidArgumentError(

const provider = authContext.options.authProviders.getOrCreateProvider(
AuthMechanism.MONGODB_SCRAM_SHA256
AuthMechanism.MONGODB_SCRAM_SHA256,
credentials.mechanismProperties
);

@@ -234,3 +241,6 @@ if (!provider) {

}
const provider = authContext.options.authProviders.getOrCreateProvider(credentials.mechanism);
const provider = authContext.options.authProviders.getOrCreateProvider(
credentials.mechanism,
credentials.mechanismProperties
);
if (!provider) {

@@ -237,0 +247,0 @@ throw new MongoInvalidArgumentError(`No AuthProvider for ${credentials.mechanism} defined.`);

@@ -554,3 +554,4 @@ import { clearTimeout, setTimeout } from 'timers';

const provider = this[kServer].topology.client.s.authProviders.getOrCreateProvider(
resolvedCredentials.mechanism
resolvedCredentials.mechanism,
resolvedCredentials.mechanismProperties
);

@@ -557,0 +558,0 @@

@@ -177,2 +177,3 @@ import { type Readable, Transform, type TransformCallback } from 'stream';

public generation: number;
public accessToken?: string;
public readonly description: Readonly<StreamDescription>;

@@ -179,0 +180,0 @@ /**

@@ -701,2 +701,5 @@ import * as dns from 'dns';

},
// Note that if the authMechanismProperties contain a TOKEN_RESOURCE that has a
// comma in it, it MUST be supplied as a MongoClient option instead of in the
// connection string.
authMechanismProperties: {

@@ -703,0 +706,0 @@ target: 'credentials',
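A sketch of the client-option form that note describes, assuming the MONGODB-OIDC option names introduced in this diff; the cluster host and resource value are placeholders chosen only to show a TOKEN_RESOURCE containing a comma.

import { MongoClient } from 'mongodb';

// The comma in TOKEN_RESOURCE cannot survive connection-string parsing,
// so the property is passed as a MongoClient option instead.
const client = new MongoClient('mongodb+srv://cluster0.example.mongodb.net/', {
  authMechanism: 'MONGODB-OIDC',
  authMechanismProperties: {
    ENVIRONMENT: 'azure',
    TOKEN_RESOURCE: 'api://example-app,second-audience'
  }
});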

@@ -39,2 +39,3 @@ import type { Document } from './bson';

HostNotFound: 7,
AuthenticationFailed: 18,
NetworkTimeout: 89,

@@ -535,2 +536,30 @@ ShutdownInProgress: 91,

 * An error generated when the user attempts to authenticate
* via OIDC callbacks, but fails.
*
* @public
* @category Error
*/
export class MongoOIDCError extends MongoRuntimeError {
/**
* **Do not use this constructor!**
*
* Meant for internal use only.
*
* @remarks
* This class is only meant to be constructed within the driver. This constructor is
* not subject to semantic versioning compatibility guarantees and may change at any time.
*
* @public
**/
constructor(message: string) {
super(message);
}
override get name(): string {
return 'MongoOIDCError';
}
}
/**
 * An error generated when the user attempts to authenticate
* via Azure, but fails.

@@ -541,3 +570,3 @@ *

*/
export class MongoAzureError extends MongoRuntimeError {
export class MongoAzureError extends MongoOIDCError {
/**

@@ -564,2 +593,30 @@ * **Do not use this constructor!**

/**
 * An error generated when the user attempts to authenticate
* via GCP, but fails.
*
* @public
* @category Error
*/
export class MongoGCPError extends MongoOIDCError {
/**
* **Do not use this constructor!**
*
* Meant for internal use only.
*
* @remarks
* This class is only meant to be constructed within the driver. This constructor is
* not subject to semantic versioning compatibility guarantees and may change at any time.
*
* @public
**/
constructor(message: string) {
super(message);
}
override get name(): string {
return 'MongoGCPError';
}
}
/**
* An error generated when a ChangeStream operation fails to execute.

@@ -566,0 +623,0 @@ *

@@ -55,2 +55,3 @@ import { Admin } from './admin';

MongoExpiredSessionError,
MongoGCPError,
MongoGridFSChunkError,

@@ -65,2 +66,3 @@ MongoGridFSStreamError,

MongoNotConnectedError,
MongoOIDCError,
MongoParseError,

@@ -255,8 +257,10 @@ MongoRuntimeError,

export type {
IdPServerInfo,
IdPInfo,
IdPServerResponse,
OIDCCallbackContext,
OIDCRefreshFunction,
OIDCRequestFunction
OIDCCallbackFunction,
OIDCCallbackParams,
OIDCResponse
} from './cmap/auth/mongodb_oidc';
export type { Workflow } from './cmap/auth/mongodb_oidc';
export type { TokenCache } from './cmap/auth/mongodb_oidc/token_cache';
export type {

@@ -263,0 +267,0 @@ MessageHeader,

import { type AuthProvider } from './cmap/auth/auth_provider';
import { GSSAPI } from './cmap/auth/gssapi';
import { type AuthMechanismProperties } from './cmap/auth/mongo_credentials';
import { MongoCR } from './cmap/auth/mongocr';
import { MongoDBAWS } from './cmap/auth/mongodb_aws';
import { MongoDBOIDC } from './cmap/auth/mongodb_oidc';
import { MongoDBOIDC, OIDC_WORKFLOWS, type Workflow } from './cmap/auth/mongodb_oidc';
import { AutomatedCallbackWorkflow } from './cmap/auth/mongodb_oidc/automated_callback_workflow';
import { HumanCallbackWorkflow } from './cmap/auth/mongodb_oidc/human_callback_workflow';
import { TokenCache } from './cmap/auth/mongodb_oidc/token_cache';
import { Plain } from './cmap/auth/plain';

@@ -13,7 +17,7 @@ import { AuthMechanism } from './cmap/auth/providers';

/** @internal */
const AUTH_PROVIDERS = new Map<AuthMechanism | string, () => AuthProvider>([
const AUTH_PROVIDERS = new Map<AuthMechanism | string, (workflow?: Workflow) => AuthProvider>([
[AuthMechanism.MONGODB_AWS, () => new MongoDBAWS()],
[AuthMechanism.MONGODB_CR, () => new MongoCR()],
[AuthMechanism.MONGODB_GSSAPI, () => new GSSAPI()],
[AuthMechanism.MONGODB_OIDC, () => new MongoDBOIDC()],
[AuthMechanism.MONGODB_OIDC, (workflow?: Workflow) => new MongoDBOIDC(workflow)],
[AuthMechanism.MONGODB_PLAIN, () => new Plain()],

@@ -37,2 +41,3 @@ [AuthMechanism.MONGODB_SCRAM_SHA1, () => new ScramSHA1()],

* @param name - The name of the provider to get or create.
* @param credentials - The credentials.
* @returns The provider.

@@ -42,3 +47,6 @@ * @throws MongoInvalidArgumentError if the mechanism is not supported.

*/
getOrCreateProvider(name: AuthMechanism | string): AuthProvider {
getOrCreateProvider(
name: AuthMechanism | string,
authMechanismProperties: AuthMechanismProperties
): AuthProvider {
const authProvider = this.existingProviders.get(name);

@@ -49,10 +57,40 @@ if (authProvider) {

const provider = AUTH_PROVIDERS.get(name)?.();
if (!provider) {
const providerFunction = AUTH_PROVIDERS.get(name);
if (!providerFunction) {
throw new MongoInvalidArgumentError(`authMechanism ${name} not supported`);
}
let provider;
if (name === AuthMechanism.MONGODB_OIDC) {
provider = providerFunction(this.getWorkflow(authMechanismProperties));
} else {
provider = providerFunction();
}
this.existingProviders.set(name, provider);
return provider;
}
/**
* Gets either a device workflow or callback workflow.
*/
getWorkflow(authMechanismProperties: AuthMechanismProperties): Workflow {
if (authMechanismProperties.OIDC_HUMAN_CALLBACK) {
return new HumanCallbackWorkflow(
new TokenCache(),
authMechanismProperties.OIDC_HUMAN_CALLBACK
);
} else if (authMechanismProperties.OIDC_CALLBACK) {
return new AutomatedCallbackWorkflow(new TokenCache(), authMechanismProperties.OIDC_CALLBACK);
} else {
const environment = authMechanismProperties.ENVIRONMENT;
const workflow = OIDC_WORKFLOWS.get(environment)?.();
if (!workflow) {
throw new MongoInvalidArgumentError(
`Could not load workflow for environment ${authMechanismProperties.ENVIRONMENT}`
);
}
return workflow;
}
}
}

@@ -13,2 +13,3 @@ import { promises as fs } from 'fs';

} from './cmap/auth/mongo_credentials';
import { type TokenCache } from './cmap/auth/mongodb_oidc/token_cache';
import { AuthMechanism } from './cmap/auth/providers';

@@ -528,3 +529,3 @@ import type { LEGAL_TCP_SOCKET_OPTIONS, LEGAL_TLS_SOCKET_OPTIONS } from './cmap/connect';

options.credentials?.mechanismProperties?.ALLOWED_HOSTS || DEFAULT_ALLOWED_HOSTS;
const isServiceAuth = !!options.credentials?.mechanismProperties?.PROVIDER_NAME;
const isServiceAuth = !!options.credentials?.mechanismProperties?.ENVIRONMENT;
if (!isServiceAuth) {

@@ -833,2 +834,4 @@ for (const host of options.hosts) {

autoEncrypter?: AutoEncrypter;
/** @internal */
tokenCache?: TokenCache;
proxyHost?: string;

@@ -835,0 +838,0 @@ proxyPort?: number;

@@ -261,8 +261,17 @@ import { type Document, Long, type ObjectId } from '../bson';

// TODO(NODE-2674): Preserve int64 sent from MongoDB
const currentCounter = Long.isLong(currentTv.counter)
? currentTv.counter
: Long.fromNumber(currentTv.counter);
const newCounter = Long.isLong(newTv.counter) ? newTv.counter : Long.fromNumber(newTv.counter);
const currentCounter =
typeof currentTv.counter === 'bigint'
? Long.fromBigInt(currentTv.counter)
: Long.isLong(currentTv.counter)
? currentTv.counter
: Long.fromNumber(currentTv.counter);
const newCounter =
typeof newTv.counter === 'bigint'
? Long.fromBigInt(newTv.counter)
: Long.isLong(newTv.counter)
? newTv.counter
: Long.fromNumber(newTv.counter);
return currentCounter.compare(newCounter);
}

@@ -372,2 +372,7 @@ import { Binary, type Document, Long, type Timestamp } from './bson';

*
* @remarks
 * **IMPORTANT**: Running operations in parallel is not supported during a transaction. The use of `Promise.all`,
 * `Promise.allSettled`, `Promise.race`, etc. to parallelize operations inside a transaction is
 * undefined behaviour.
*
* @param options - Options for the transaction

@@ -447,2 +452,7 @@ */

*
 * **IMPORTANT:** Running operations in parallel is not supported during a transaction. The use of `Promise.all`,
 * `Promise.allSettled`, `Promise.race`, etc. to parallelize operations inside a transaction is
 * undefined behaviour.
*
*
* @remarks

@@ -449,0 +459,0 @@ * - If all operations successfully complete and the `commitTransaction` operation is successful, then the provided function will return the result of the provided function.
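A short sketch of the supported pattern those remarks imply: await each operation in sequence inside withTransaction rather than fanning them out with Promise.all. The connection string, database, collections, and documents are placeholders.

import { MongoClient } from 'mongodb';

async function run(): Promise<void> {
  const client = new MongoClient('mongodb://localhost:27017');
  const session = client.startSession();
  try {
    await session.withTransaction(async () => {
      const orders = client.db('app').collection('orders');
      const audit = client.db('app').collection('audit');
      // Sequential awaits: no Promise.all / Promise.race inside the transaction.
      await orders.insertOne({ sku: 'abc-123', qty: 1 }, { session });
      await audit.insertOne({ event: 'order-created', at: new Date() }, { session });
    });
  } finally {
    await session.endSession();
    await client.close();
  }
}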

@@ -6,2 +6,3 @@ import * as crypto from 'crypto';

import * as http from 'http';
import { clearTimeout, setTimeout } from 'timers';
import * as url from 'url';

@@ -1161,2 +1162,34 @@ import { URL } from 'url';

/**
 * Perform a GET request that returns the status and body.
* @internal
*/
export function get(
url: URL | string,
options: http.RequestOptions = {}
): Promise<{ body: string; status: number | undefined }> {
return new Promise((resolve, reject) => {
/* eslint-disable prefer-const */
let timeoutId: NodeJS.Timeout;
const request = http
.get(url, options, response => {
response.setEncoding('utf8');
let body = '';
response.on('data', chunk => (body += chunk));
response.on('end', () => {
clearTimeout(timeoutId);
resolve({ status: response.statusCode, body });
});
})
.on('error', error => {
clearTimeout(timeoutId);
reject(error);
})
.end();
timeoutId = setTimeout(() => {
request.destroy(new MongoNetworkTimeoutError(`request timed out after 10 seconds`));
}, 10000);
});
}
export async function request(uri: string): Promise<Record<string, any>>;

@@ -1163,0 +1196,0 @@ export async function request(
