mongodb-data-service - npm Package Compare versions

Comparing version 0.0.0-next-0589fc49d0b69692f215b29e571cf409f853e7a6 to 0.0.0-next-05a557b9b16a2fc18ebfc264c2a10c25be92525b

lib/ssh-tunnel-helpers.d.ts


lib/connect-mongo-client.d.ts
/// <reference types="node" />
import { MongoClient } from 'mongodb';
import type { DevtoolsConnectOptions, DevtoolsConnectionState } from '@mongodb-js/devtools-connect';
import type SSHTunnel from '@mongodb-js/ssh-tunnel';
import type { DevtoolsProxyOptions, Tunnel } from '@mongodb-js/devtools-proxy-support';
import type { ConnectionOptions } from './connection-options';

@@ -12,5 +12,11 @@ import type { UnboundDataServiceImplLogger } from './logger';

export type ReauthenticationHandler = () => PromiseLike<void> | void;
export declare function prepareOIDCOptions(connectionOptions: Readonly<ConnectionOptions>, signal?: AbortSignal, reauthenticationHandler?: ReauthenticationHandler): Required<Pick<DevtoolsConnectOptions, 'oidc' | 'authMechanismProperties'>>;
export declare function connectMongoClientDataService({ connectionOptions, setupListeners, signal, logger, productName, productDocsLink, reauthenticationHandler, }: {
export declare function prepareOIDCOptions({ connectionOptions, proxyOptions, signal, reauthenticationHandler, }: {
connectionOptions: Readonly<ConnectionOptions>;
proxyOptions?: DevtoolsProxyOptions;
signal?: AbortSignal;
reauthenticationHandler?: ReauthenticationHandler;
}): Required<Pick<DevtoolsConnectOptions, 'oidc' | 'authMechanismProperties' | 'applyProxyToOIDC'>>;
export declare function connectMongoClientDataService({ connectionOptions, proxyOptions, setupListeners, signal, logger, productName, productDocsLink, reauthenticationHandler, }: {
connectionOptions: Readonly<ConnectionOptions>;
proxyOptions?: Readonly<DevtoolsProxyOptions>;
setupListeners: (client: MongoClient) => void;

@@ -25,3 +31,3 @@ signal?: AbortSignal;

crudClient: CloneableMongoClient,
sshTunnel: SSHTunnel | undefined,
tunnel: Tunnel | undefined,
connectionState: DevtoolsConnectionState,

@@ -28,0 +34,0 @@ options: {
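
The hunks above replace the old positional prepareOIDCOptions(connectionOptions, signal, reauthenticationHandler) signature with a single named-options object that also accepts proxyOptions, and the return type now includes applyProxyToOIDC. A minimal calling sketch under the new shape; the deep import path and the placeholder values are assumptions for illustration, not part of the diff:

import { prepareOIDCOptions } from 'mongodb-data-service/lib/connect-mongo-client'; // assumed deep import path

const abortController = new AbortController();
const oidcDriverOptions = prepareOIDCOptions({
  connectionOptions: { connectionString: 'mongodb://localhost:27017' }, // Readonly<ConnectionOptions>
  proxyOptions: {},                      // optional DevtoolsProxyOptions
  signal: abortController.signal,        // optional AbortSignal
  reauthenticationHandler: () => {},     // optional; invoked when the driver needs to re-authenticate
});
// The result contains `oidc`, `authMechanismProperties` and `applyProxyToOIDC`,
// ready to be spread into the driver connection options.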

@@ -9,2 +9,3 @@ "use strict";

const devtools_connect_1 = require("@mongodb-js/devtools-connect");
const devtools_proxy_support_1 = require("@mongodb-js/devtools-proxy-support");
const events_1 = __importDefault(require("events"));

@@ -14,3 +15,3 @@ const mongodb_connection_string_url_1 = __importDefault(require("mongodb-connection-string-url"));

const redact_1 = require("./redact");
const ssh_tunnel_1 = require("./ssh-tunnel");
const ssh_tunnel_helpers_1 = require("./ssh-tunnel-helpers");
const run_command_1 = require("./run-command");

@@ -20,12 +21,15 @@ const logger_1 = require("./logger");

exports.createClonedClient = Symbol('createClonedClient');
// Return an ALLOWED_HOSTS value that matches the hosts listed in the connection
// string, including possible SRV "sibling" domains.
function matchingAllowedHosts(connectionOptions) {
const connectionString = new mongodb_connection_string_url_1.default(connectionOptions.connectionString, { looseValidation: true });
const suffixes = connectionString.hosts.map((hostStr) => {
// eslint-disable-next-line
const { host } = hostStr.match(/^(?<host>.+?)(?<port>:[^:\]\[]+)?$/)
?.groups;
if (host.startsWith('[') && host.endsWith(']')) {
return host.slice(1, -1);
return host.slice(1, -1); // IPv6
}
if (host.match(/^[0-9.]+$/)) {
return host;
return host; // IPv4
}

@@ -35,2 +39,3 @@ if (!host.includes('.') || !connectionString.isSRV) {

}
// An SRV record for foo.bar.net can resolve to any hosts that match `*.bar.net`
const parts = host.split('.');

@@ -42,9 +47,10 @@ parts[0] = '*';

}
function prepareOIDCOptions(connectionOptions, signal, reauthenticationHandler) {
function prepareOIDCOptions({ connectionOptions, proxyOptions = {}, signal, reauthenticationHandler, }) {
const options = {
oidc: { ...connectionOptions.oidc },
authMechanismProperties: {},
applyProxyToOIDC: false,
};
const allowedFlows = connectionOptions.oidc?.allowedFlows ?? ['auth-code'];
let isFirstAuthAttempt = true;
let isFirstAuthAttempt = true; // Don't need to prompt for re-auth on first attempt
options.oidc.allowedFlows = async function () {

@@ -58,5 +64,15 @@ if (!isFirstAuthAttempt) {

if (connectionOptions.oidc?.enableUntrustedEndpoints) {
// Set the driver's `authMechanismProperties` (non-url) `ALLOWED_HOSTS` value
// to match the connection string hosts, including possible SRV "sibling" domains.
options.authMechanismProperties.ALLOWED_HOSTS =
matchingAllowedHosts(connectionOptions);
}
if (connectionOptions.oidc?.shareProxyWithConnection) {
options.applyProxyToOIDC = true;
}
else {
options.oidc.customHttpOptions = {
agent: (0, devtools_proxy_support_1.createAgent)(proxyOptions),
};
}
options.oidc.signal = signal;

@@ -66,5 +82,10 @@ return options;

exports.prepareOIDCOptions = prepareOIDCOptions;
async function connectMongoClientDataService({ connectionOptions, setupListeners, signal, logger, productName, productDocsLink, reauthenticationHandler, }) {
async function connectMongoClientDataService({ connectionOptions, proxyOptions = {}, setupListeners, signal, logger, productName, productDocsLink, reauthenticationHandler, }) {
debug('connectMongoClient invoked', (0, redact_1.redactConnectionOptions)(connectionOptions));
const oidcOptions = prepareOIDCOptions(connectionOptions, signal, reauthenticationHandler);
const oidcOptions = prepareOIDCOptions({
connectionOptions,
proxyOptions,
signal,
reauthenticationHandler,
});
const url = connectionOptions.connectionString;

@@ -75,6 +96,16 @@ const options = {

monitorCommands: true,
useSystemCA: connectionOptions.useSystemCA,
autoEncryption: connectionOptions.fleOptions?.autoEncryption,
...oidcOptions,
};
if (connectionOptions.lookup) {
// NB: This value is currently only passed through by compass-web in the
// browser environment as a custom way to pass extra metadata to the
// underlying socket implementation that works over the websocket protocol.
// Even though the type of `connectionOptions.lookup` is technically
// assignable to `options.lookup`, this method does not actually implement
// the `dns.lookup` interface and could lead to unexpected behavior if the
// driver ever decides to use it before passing it directly to the
// `socket.connect` method as an option.
options.lookup = connectionOptions.lookup;
}
if (options.autoEncryption && process.env.COMPASS_CRYPT_LIBRARY_PATH) {

@@ -89,7 +120,18 @@ options.autoEncryption = {

}
const [tunnel, socks5Options] = await (0, ssh_tunnel_1.openSshTunnel)(connectionOptions.sshTunnel, logger);
// If connectionOptions.sshTunnel is defined, open an ssh tunnel.
//
// If connectionOptions.sshTunnel is not defined, the tunnel
// will also be undefined.
const tunnel = (0, devtools_proxy_support_1.createSocks5Tunnel)((0, ssh_tunnel_helpers_1.getTunnelOptions)(connectionOptions, proxyOptions), 'generate-credentials', 'mongodb://');
// TODO: Not urgent, but it might be helpful to properly implement redaction
// and then actually log this to the log file; it has been helpful for
// debugging e2e tests.
// console.log({tunnel, tunnelOptions: getTunnelOptions(connectionOptions), connectionOptions, oidcOptions})
if (tunnel && logger)
(0, devtools_proxy_support_1.hookLogger)(tunnel.logger, logger, 'compass-tunnel');
const tunnelForwardingErrors = [];
tunnel?.on('forwardingError', (err) => tunnelForwardingErrors.push(err));
if (socks5Options) {
Object.assign(options, socks5Options);
await tunnel?.listen();
if (tunnel?.config) {
Object.assign(options, tunnel.config);
}

@@ -109,2 +151,4 @@ class CompassMongoClient extends mongodb_1.MongoClient {

async function connectSingleClient(overrideOptions) {
// Deep clone because of https://jira.mongodb.org/browse/NODE-4124,
// the options here are being mutated.
const connectOptions = lodash_1.default.cloneDeep({ ...options, ...overrideOptions });

@@ -131,2 +175,7 @@ const { client, state } = await (0, devtools_connect_1.connectMongoClient)(url, connectOptions, connectLogger, CompassMongoClient);

debug('waiting for MongoClient to connect ...');
// Create one or two clients, depending on whether CSFLE
// is enabled. If it is, create one for interacting with
// server metadata (e.g. build info, instance data, etc.)
// and one for interacting with the actual CRUD data.
// If CSFLE is disabled, use a single client for both cases.
[metadataClient, crudClient, state] = await Promise.race([

@@ -139,4 +188,9 @@ (async () => {

parentHandlePromise.catch(() => {
/* handled below */
});
let crudClient;
// This used to happen in parallel, but since the introduction of OIDC,
// connection state needs to be shared and managed on the longest-lived client
// instance, so we use the DevtoolsConnectionState instance created for the
// metadata client here.
if (options.autoEncryption) {

@@ -154,2 +208,3 @@ try {

try {
// Make sure that if this failed, we clean up properly.
await parentHandlePromise;

@@ -162,6 +217,8 @@ }

}
// Return the parentHandle here so that it's included in the options that
// are passed to compass-shell.
return [metadataClient, crudClient, state];
})(),
(0, ssh_tunnel_1.waitForTunnelError)(tunnel),
]);
(0, ssh_tunnel_helpers_1.waitForTunnelError)(tunnel),
]); // waitForTunnel always throws, never resolves
options.parentHandle = await state.getStateShareServer();

@@ -180,6 +237,7 @@ return [

await Promise.all([
(0, ssh_tunnel_1.forceCloseTunnel)(tunnel, logger),
tunnel?.close(),
crudClient?.close(),
metadataClient?.close(),
]).catch(() => {
/* ignore errors */
});

@@ -186,0 +244,0 @@ if (tunnelForwardingErrors.length > 0) {
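
The matchingAllowedHosts helper above derives the OIDC ALLOWED_HOSTS list from the connection string, including SRV "sibling" domains. A simplified, self-contained sketch of that mapping (the function name and examples are illustrative, not the package's implementation, and it skips the IPv6 and port-stripping edge cases handled above):

// Simplified illustration of the ALLOWED_HOSTS derivation described above.
function allowedHostsSketch(hosts: string[], isSRV: boolean): string[] {
  return hosts.map((hostWithPort) => {
    const host = hostWithPort.replace(/:\d+$/, ''); // drop a trailing port
    if (!isSRV || !host.includes('.')) return host; // IPs and single-label hosts stay as-is
    const parts = host.split('.');
    parts[0] = '*'; // an SRV record for foo.bar.net may resolve to any *.bar.net host
    return parts.join('.');
  });
}

// allowedHostsSketch(['foo.bar.net'], true)      -> ['*.bar.net']
// allowedHostsSketch(['localhost:27017'], false) -> ['localhost']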

@@ -5,4 +5,6 @@ /// <reference types="node" />

import type { DataServiceImplLogger } from './logger';
export default function connect({ connectionOptions, signal, logger, productName, productDocsLink, }: {
import type { DevtoolsProxyOptions } from '@mongodb-js/devtools-proxy-support';
export default function connect({ connectionOptions, proxyOptions, signal, logger, productName, productDocsLink, }: {
connectionOptions: ConnectionOptions;
proxyOptions?: DevtoolsProxyOptions;
signal?: AbortSignal;

@@ -9,0 +11,0 @@ logger?: DataServiceImplLogger;

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const data_service_1 = require("./data-service");
async function connect({ connectionOptions, signal, logger, productName, productDocsLink, }) {
const dataService = new data_service_1.DataServiceImpl(connectionOptions, logger);
async function connect({ connectionOptions, proxyOptions, signal, logger, productName, productDocsLink, }) {
const dataService = new data_service_1.DataServiceImpl(connectionOptions, logger, proxyOptions);
await dataService.connect({

@@ -7,0 +7,0 @@ signal,

@@ -6,2 +6,3 @@ /// <reference types="node" />

import type { ConnectionOptions } from './connection-options';
import type { DevtoolsProxyOptions } from '@mongodb-js/devtools-proxy-support';
export declare class ConnectionAttempt {

@@ -13,5 +14,7 @@ _abortController: AbortController;

_logger: UnboundDataServiceImplLogger;
constructor({ connectFn, logger, }: {
_proxyOptions: DevtoolsProxyOptions | undefined;
constructor({ connectFn, logger, proxyOptions, }: {
connectFn: typeof connect;
logger: UnboundDataServiceImplLogger;
proxyOptions?: DevtoolsProxyOptions;
});

@@ -24,6 +27,7 @@ connect(connectionOptions: ConnectionOptions): Promise<DataService | void>;

}
export declare function createConnectionAttempt({ logger, connectFn, }: {
export declare function createConnectionAttempt({ logger, proxyOptions, connectFn, }: {
logger: UnboundDataServiceImplLogger;
proxyOptions: DevtoolsProxyOptions;
connectFn?: typeof connect;
}): ConnectionAttempt;
//# sourceMappingURL=connection-attempt.d.ts.map
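
The declarations above show that createConnectionAttempt now takes proxyOptions alongside the logger and threads them through to the eventual DataService. A hedged usage sketch; the top-level import path and the logger value are assumptions:

import { createConnectionAttempt } from 'mongodb-data-service'; // assumed export location

declare const logger: Parameters<typeof createConnectionAttempt>[0]['logger']; // assumed to be provided by the host app

const attempt = createConnectionAttempt({
  logger,
  proxyOptions: {}, // DevtoolsProxyOptions, forwarded to DataServiceImpl
});
const dataService = await attempt.connect({
  connectionString: 'mongodb://localhost:27017',
});
// Per the declaration above, connect() resolves to a DataService, or to nothing
// if the attempt was cancelled before it finished.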

@@ -10,3 +10,3 @@ "use strict";

const connect_1 = __importDefault(require("./connect"));
const { mongoLogId } = (0, compass_logging_1.createLoggerAndTelemetry)('CONNECTION-ATTEMPT');
const { mongoLogId } = (0, compass_logging_1.createLogger)('CONNECTION-ATTEMPT');
function isConnectionAttemptTerminatedError(err) {

@@ -16,3 +16,3 @@ return err?.name === 'MongoTopologyClosedError';

class ConnectionAttempt {
constructor({ connectFn, logger, }) {
constructor({ connectFn, logger, proxyOptions, }) {
this._closed = false;

@@ -22,2 +22,3 @@ this._dataService = null;

this._connectFn = connectFn;
this._proxyOptions = proxyOptions;
this._abortController = new AbortController();

@@ -47,2 +48,3 @@ }

logger: this._logger,
proxyOptions: this._proxyOptions,
});

@@ -74,2 +76,4 @@ return this._dataService;

catch (err) {
// When the disconnect fails, we free up the ui and we can
// silently wait for the timeout if it's still attempting to connect.
this._logger.debug('Connection Attempt', mongoLogId(1001000283), 'close requested', 'error while disconnecting from connection attempt', err);

@@ -80,5 +84,6 @@ }

exports.ConnectionAttempt = ConnectionAttempt;
function createConnectionAttempt({ logger, connectFn = connect_1.default, }) {
function createConnectionAttempt({ logger, proxyOptions, connectFn = connect_1.default, }) {
return new ConnectionAttempt({
logger,
proxyOptions,
connectFn,

@@ -85,0 +90,0 @@ });

@@ -6,21 +6,73 @@ import type { AutoEncryptionOptions } from 'mongodb';

enableUntrustedEndpoints?: boolean;
shareProxyWithConnection?: boolean;
allowedFlows?: ExtractArrayEntryType<NonNullable<DevtoolsConnectOptions['oidc']>['allowedFlows']>[];
};
export interface ConnectionOptions {
/**
* The connection string to connect to the MongoDB instance including all options set by the user.
*/
connectionString: string;
/**
* If present the connection should be established via an SSH tunnel according to the provided SSH options.
*/
sshTunnel?: ConnectionSshOptions;
useSystemCA?: boolean;
/**
* Alternative to Socks5 proxying / SSH tunnel: If set, inherit Compass's application-level
* proxy settings.
*/
useApplicationLevelProxy?: boolean;
/**
* If present the connection should use OIDC authentication.
*/
oidc?: OIDCOptions;
/**
* Options related to client-side field-level encryption.
*/
fleOptions?: ConnectionFleOptions;
/**
* Optional: a real net/tls connection callback function option that is only
* used in Compass as a way to pass extra metadata about an Atlas cluster when
* connecting in the browser environment through the websocket.
*/
lookup?: () => {
wsURL: string;
projectId?: string;
clusterName?: string;
srvAddress?: string;
};
}
export interface ConnectionFleOptions {
/**
* Whether to store KMS credentials to disk or not.
*/
storeCredentials: boolean;
/**
* Encryption options passed to the driver verbatim.
*/
autoEncryption?: AutoEncryptionOptions;
}
export interface ConnectionSshOptions {
/**
* Host to establish SSH tunnel to.
*/
host: string;
/**
* Port to establish SSH tunnel to.
*/
port: number;
/**
* Username of the SSH user.
*/
username: string;
/**
* Password for SSH authentication.
*/
password?: string;
/**
* Private key file to use as SSH identity.
*/
identityKeyFile?: string;
/**
* Password for a protected `identityKeyFile`.
*/
identityKeyPassphrase?: string;

@@ -27,0 +79,0 @@ }
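
The ConnectionOptions interface above gathers the connection string, SSH tunnel, proxy, OIDC and FLE settings in one documented place. A minimal sketch of a value satisfying it, with placeholder hosts and paths (the deep type import path is an assumption):

import type { ConnectionOptions } from 'mongodb-data-service/lib/connection-options'; // assumed path

const connectionOptions: ConnectionOptions = {
  connectionString: 'mongodb://cluster.example.net:27017/?tls=true',
  sshTunnel: {
    host: 'jump.example.net',
    port: 22,
    username: 'tunnel-user',
    identityKeyFile: '/home/user/.ssh/id_ed25519',
  },
  oidc: { allowedFlows: ['auth-code'] },
};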

@@ -5,3 +5,14 @@ import type { MongoClient, Document } from 'mongodb';

import type { UnboundDataServiceImplLogger } from './logger';
/**
* A list of field paths for a document.
* For example, ['a', 'b'] refers to the field b of the nested document a.
* This is used rather than dot-style `a.b` notation to disambiguate
* cases in which field names contain a literal `.` character.
*/
type FieldPath = string[];
/**
* A description of the list of encrypted fields for a given collection.
* Equality-searchable fields are handled separately since they require
* special treatment in some cases.
*/
export interface CSFLEEncryptedFieldsSet {

@@ -11,4 +22,34 @@ readonly encryptedFields: FieldPath[];

}
/**
* Helper for ensuring that all fields that were decrypted when
* they were read from the server are also written back as encrypted.
*/
export interface CSFLECollectionTracker {
/**
* Returns whether performing an update (or replacement) of
* `originalDocument` from the collection `ns` is allowable
* with regards to re-encrypting fields that were originally
* decrypted.
*
* The original documents **must** have been received from the
* server or generated from a HadronDocument instance that
* was created based on a document received from the server.
* This is required for ensuring that the tracker properly
* recognizes fields that were read as decrypted fields.
*
* @param ns A MongoDB `database.collection` namespace.
* @param originalDocument The original document that was received from the server.
*/
isUpdateAllowed(ns: string, originalDocument: Document): Promise<boolean>;
/**
* Returns whether a collection is known to have a schema
* description that would prevent unintentional inserts
* of unencrypted data.
*
* This includes the case in which any server schema is
* present, not just one that indicates that there are fields
* which should be encrypted.
*
* @param ns A MongoDB `database.collection` namespace.
*/
knownSchemaForCollection(ns: string): Promise<{

@@ -15,0 +56,0 @@ hasSchema: boolean;
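
The FieldPath documentation above is worth a concrete illustration: paths are arrays of segments rather than dot-joined strings, so a literal '.' inside a field name stays unambiguous. A small, self-contained sketch (the documents are hypothetical):

type FieldPath = string[]; // mirrors the declaration above

// For { a: { b: 1 } } the nested field b is addressed as ['a', 'b'].
// For { 'a.b': 1 } the single top-level field is addressed as ['a.b'].
// Dot notation would collapse both cases to the same string 'a.b'.
const nestedField: FieldPath = ['a', 'b'];
const literalDotField: FieldPath = ['a.b'];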

@@ -25,2 +25,5 @@ "use strict";

if (existingField) {
// Deduplicate field entries. If there already is one for this field,
// only make sure that the `equalityQueryable` attributes match.
// If they don't, assume that the field is not equality-queryable.
existingField.equalityQueryable &&= equalityQueryable;

@@ -59,2 +62,3 @@ }

}
// Fetch a list of encrypted fields from a JSON schema document.
function extractEncryptedFieldsFromSchema(schema) {

@@ -64,2 +68,3 @@ let ret = new CSFLEEncryptedFieldsSetImpl();

const algorithm = schema?.encrypt?.algorithm;
// Deterministic encryption in CSFLE = Equality-searchable
return ret.addField([], algorithm === 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic');

@@ -73,2 +78,3 @@ }

}
// Fetch a list of encrypted fields from a QE EncryptedFieldConfig document.
function extractEncryptedFieldsFromEncryptedFieldsConfig(encryptedFields) {

@@ -86,5 +92,7 @@ const fields = encryptedFields?.fields ?? [];

}
// Fetch a list of encrypted fields based on client-side driver options.
function extractEncrytedFieldFromAutoEncryptionOptions(ns, autoEncryption) {
return CSFLEEncryptedFieldsSetImpl.merge(extractEncryptedFieldsFromSchema(autoEncryption.schemaMap?.[ns]), extractEncryptedFieldsFromEncryptedFieldsConfig(autoEncryption?.encryptedFieldsMap?.[ns]));
}
// Fetch a list of encrypted fields based on the server-side collection info.
function extractEncryptedFieldsFromListCollectionsResult(options) {

@@ -95,2 +103,4 @@ const schema = options?.validator?.$jsonSchema;

}
// Fetch a list of encrypted fields based on a document received from the server.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function extractEncryptedFieldsFromDocument(doc) {

@@ -120,2 +130,3 @@ const decryptedFields = (doc?.[Symbol.for('@@mdb.decryptedKeys')] ?? []).map((field) => [field]);

this._logger?.info('COMPASS-DATA-SERVICE', (0, logger_1.mongoLogId)(1001000118), 'CSFLECollectionTracker', 'Hooking AutoEncrypter metaDataClient property');
// eslint-disable-next-line @typescript-eslint/no-explicit-any
autoEncrypter._metaDataClient = this._createHookedMetadataClient(autoEncrypter._metaDataClient);

@@ -127,5 +138,13 @@ }

if (originalDocEncryptedFields.length === 0) {
// Shortcut: If no fields were encrypted when we got them
// from the server, then we also do not need to worry
// about writing them back unencrypted.
return true;
}
const { encryptedFields } = await this.knownSchemaForCollection(ns);
// Updates are allowed if there is a guarantee that all fields that
// were decrypted in the original document will also be written back
// as encrypted fields. To that end, the server or client configuration
// must contain entries that are picked up by libmongocrypt as indicators
// for encrypted fields.
for (const originalDocPath of originalDocEncryptedFields) {

@@ -145,2 +164,3 @@ if (!CSFLEEncryptedFieldsSetImpl.isEncryptedField(encryptedFields, originalDocPath)) {

_processClientSchemaDefinitions() {
// Process client-side options available at instantiation time.
const { autoEncryption } = this._crudClient.options;

@@ -164,2 +184,7 @@ for (const ns of [

this._logger?.info('COMPASS-DATA-SERVICE', (0, logger_1.mongoLogId)(1001000120), 'CSFLECollectionTracker', 'Refreshing listCollections cache', { ns });
// Let the data service fetch new collection infos.
// We installed a listener earlier which picks up the results; it also picks
// up results from unrelated listCollections calls, so explicitly fetching
// them is needed less often.
await this._dataService.listCollections(parsedNs.database, {

@@ -172,2 +197,3 @@ name: parsedNs.collection,

_getCSFLECollectionInfo(ns) {
// Look up the internally stored CSFLE collection info for a specific namespace.
const existing = this._nsToInfo.get(ns);

@@ -183,2 +209,4 @@ if (existing)

const info = this._getCSFLECollectionInfo(ns);
// Store the updated list of encrypted fields.
// This list can be empty if no server-side validation existed or was removed.
info.serverEnforcedEncryptedFields =

@@ -190,2 +218,12 @@ extractEncryptedFieldsFromListCollectionsResult(result.options);

_createHookedMetadataClient(wrappedClient) {
// The AutoEncrypter instance used by the MongoClient will
// use listCollections to look up metadata for a given collection.
// We hook into this process to verify that this listCollections
// call does not return looser restrictions than those that
// Compass knows about and relies on.
// This listCollections call will only be made in a specific way,
// with specific arguments. If this ever changes at some point,
// we may need to work out e.g. a good semi-official hook with the
// driver team, similar to what we have for the @@mdb.decryptedFields
// functionality, but currently no such changes are planned or expected.
return {

@@ -216,2 +254,5 @@ db: (dbName) => {

if (typeof filter?.name !== 'string' || collectionInfos.length > 1) {
// This is an assertion more than an actual error condition.
// It ensures that we're only getting listCollections requests
// in the format that we expect them to come in.
return new Error(`[Compass] Unexpected listCollections request on '${dbName}' with name: '${filter?.name}'`);

@@ -229,2 +270,4 @@ }

if (filter.name !== info.name) {
// Also just a consistency check to make sure that things
// didn't go *terribly* wrong somewhere.
return new Error(`[Compass] Unexpected listCollections name mismatch: got ${info.name}, expected ${filter.name}`);

@@ -231,0 +274,0 @@ }
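
The tracker's contract, as documented above, is that updates are only allowed when every field that was decrypted on read is guaranteed to be written back encrypted. A hedged sketch of how a caller might consult it before replacing a document; the import paths and the helper function are assumptions for illustration:

import type { Document } from 'mongodb';
import type { CSFLECollectionTracker } from 'mongodb-data-service/lib/csfle-collection-tracker'; // assumed path

// `originalDoc` must have been read from the server (or built from a
// HadronDocument based on a server document) so decrypted fields are tracked.
async function assertWriteBackIsSafe(
  tracker: CSFLECollectionTracker,
  ns: string,
  originalDoc: Document
): Promise<void> {
  if (!(await tracker.isUpdateAllowed(ns, originalDoc))) {
    throw new Error(`Refusing to write back to ${ns}: decrypted fields could be stored unencrypted`);
  }
  const { hasSchema } = await tracker.knownSchemaForCollection(ns);
  if (!hasSchema) {
    // No server/client schema guards this collection against unencrypted inserts.
    console.warn(`No known encryption schema for ${ns}`);
  }
}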

/// <reference types="node" />
import type { DevtoolsProxyOptions } from '@mongodb-js/devtools-proxy-support';
import { ExplainVerbosity } from 'mongodb';

@@ -27,2 +28,4 @@ import type { AggregateOptions, AggregationCursor, AnyBulkWriteOperation, BulkWriteOptions, BulkWriteResult, Collection, CountDocumentsOptions, CreateCollectionOptions, CreateIndexesOptions, DeleteOptions, DeleteResult, Document, EstimatedDocumentCountOptions, Filter, FindCursor, FindOneAndReplaceOptions, FindOneAndUpdateOptions, FindOptions, IndexSpecification, InsertManyResult, InsertOneOptions, InsertOneResult, TopologyDescription, TopologyDescriptionChangedEvent, TopologyType, IndexInformationOptions, UpdateFilter, UpdateOptions, UpdateResult, ReplaceOptions, ClientEncryptionDataKeyProvider, ClientEncryptionCreateDataKeyProviderOptions, SearchIndexDescription } from 'mongodb';

connectionInfoSecretsChanged: () => void;
close: () => void;
oidcAuthFailed: (error: string) => void;
}

@@ -54,2 +57,6 @@ export type UpdatePreviewChange = {

readonly id: number;
/*** Connection ***/
/**
* Connect the service
*/
connect(options?: {

@@ -60,4 +67,13 @@ signal?: AbortSignal;

}): Promise<void>;
/**
* Disconnect the service
*/
disconnect(): Promise<void>;
/**
* Returns whether or not current instance is connected
*/
isConnected(): boolean;
/**
* Returns connection options passed to the driver on connection
*/
getMongoClientConnectionOptions(): {

@@ -67,17 +83,65 @@ url: string;

} | undefined;
/**
* Returns connection options DataService was initialized with
*/
getConnectionOptions(): Readonly<ConnectionOptions>;
/**
* Returns connection string for the connection options DataService was
* initialized with
*/
getConnectionString(): ConnectionStringUrl;
/**
* Return the current topology type, as reported by the driver's topology
* update events.
*
* @returns The current topology type.
*/
getCurrentTopologyType(): TopologyType;
/**
* Returns the most recent topology description from the server's SDAM events.
* https://github.com/mongodb/specifications/blob/master/source/server-discovery-and-monitoring/server-discovery-and-monitoring-monitoring.rst#events
*/
getLastSeenTopology(): null | TopologyDescription;
/**
* Is the data service allowed to perform write operations.
* @returns If the data service is writable.
*/
isWritable(): boolean;
/**
* Is the data service connected to a mongos.
* @returns If the data service is connected to a mongos.
*/
isMongos(): boolean;
/*** Server Stats and Info ***/
/**
* Get the current instance details.
*/
instance(): Promise<InstanceDetails>;
/**
* Returns the results of currentOp.
*/
currentOp(): Promise<{
inprog: Document;
}>;
/**
* Returns the result of serverStatus.
*/
serverStatus(): Promise<Document>;
/**
* Returns the result of top.
*
* @param callback - the callback.
*/
top(): Promise<{
totals: Record<string, unknown>;
}>;
/**
* Kills operation by operation id
* @see {@link https://www.mongodb.com/docs/manual/reference/command/killOp/#mongodb-dbcommand-dbcmd.killOp}
*/
killOp(id: number, comment?: string): Promise<Document>;
/*** Collections ***/
/**
* List all collections for a database.
*/
listCollections(databaseName: string, filter?: Document, options?: {

@@ -87,12 +151,65 @@ nameOnly?: true;

}): Promise<ReturnType<typeof adaptCollectionInfo>[]>;
/**
* Returns normalized collection info provided by listCollection command for a
* specific collection
*
* @param dbName database name
* @param collName collection name
*/
collectionInfo(dbName: string, collName: string): Promise<ReturnType<typeof adaptCollectionInfo> | null>;
/**
* Get the stats for a collection.
*
* @param databaseName - The database name.
* @param collectionName - The collection name.
*/
collectionStats(databaseName: string, collectionName: string): Promise<CollectionStats>;
/**
* Creates a collection
*
* @param ns - The namespace.
* @param options - The options.
*/
createCollection(ns: string, options: CreateCollectionOptions): Promise<Collection<Document>>;
/**
* Create a new view.
*
* @param name - The collectionName for the view.
* @param sourceNs - The source `<db>.<collectionOrViewName>` for the view.
* @param pipeline - The aggregation pipeline for the view.
* @param options - Options e.g. collation.
*/
createView(name: string, sourceNs: string, pipeline: Document[], options: CreateCollectionOptions): Promise<Collection<Document>>;
/**
* Update a collection.
*
* @param ns - The namespace.
* @param flags - The flags.
*/
updateCollection(ns: string, flags: Document & {
collMod?: never;
}): Promise<Document>;
/**
* Drops a collection from a database
*
* @param ns - The namespace.
* @param callback - The callback.
*/
dropCollection(ns: string): Promise<boolean>;
/**
*
*/
renameCollection(ns: string, newCollectionName: string): Promise<Collection<Document>>;
/**
* Count the number of documents in the collection.
*
* @param ns - The namespace to search on.
* @param options - The query options.
* @param executionOptions - The execution options.
*/
estimatedCount(ns: string, options?: EstimatedDocumentCountOptions, executionOptions?: ExecutionOptions): Promise<number>;
/*** Databases ***/
/**
* List all databases on the currently connected instance.
*/
listDatabases(options?: {

@@ -106,9 +223,41 @@ nameOnly?: true;

}[]>;
/**
* Get the stats for a database.
*
* @param name - The database name.
* @param callback - The callback.
*/
databaseStats(name: string): Promise<ReturnType<typeof adaptDatabaseInfo> & {
name: string;
}>;
/**
* Drops a database
*
* @param name - The database name
*/
dropDatabase(name: string): Promise<boolean>;
/*** Indexes ***/
/**
* Get the indexes for the collection.
*
* @param ns - The collection namespace.
* @param options - Index information options
*/
indexes(ns: string, options?: IndexInformationOptions): Promise<IndexDefinition[]>;
/**
* Creates an index
*
* @param ns - The namespace.
* @param spec - The index specification.
* @param options - The options.
*/
createIndex(ns: string, spec: IndexSpecification, options: CreateIndexesOptions): Promise<string>;
/**
* Drops an index from a collection
*
* @param ns - The namespace.
* @param name - The index name.
*/
dropIndex(ns: string, name: string): Promise<Document>;
/*** SearchIndexes ***/
isListSearchIndexesSupported(ns: string): Promise<boolean>;

@@ -119,13 +268,102 @@ getSearchIndexes(ns: string): Promise<SearchIndex[]>;

dropSearchIndex(ns: string, name: string): Promise<void>;
/*** Aggregation ***/
/**
* Execute an aggregation framework pipeline with the provided options on the
* collection.
*
* @param ns - The namespace to search on.
* @param pipeline - The aggregation pipeline.
* @param options - The aggregation options.
* @param executionOptions - The execution options.
*/
aggregate(ns: string, pipeline: Document[], options?: AggregateOptions, executionOptions?: ExecutionOptions): Promise<Document[]>;
/**
* Returns an aggregation cursor on the collection.
*
* @param ns - The namespace to search on.
* @param pipeline - The aggregation pipeline.
* @param options - The aggregation options.
*/
aggregateCursor(ns: string, pipeline: Document[], options?: AggregateOptions): AggregationCursor;
explainAggregate(ns: string, pipeline: Document[], options: AggregateOptions, executionOptions?: ExplainExecuteOptions): Promise<Document>;
/*** Find ***/
/**
* Find documents for the provided filter and options on the collection.
*
* @param ns - The namespace to search on.
* @param filter - The query filter.
* @param options - The query options.
* @param executionOptions - The execution options.
*/
find(ns: string, filter: Filter<Document>, options?: FindOptions, executionOptions?: ExecutionOptions): Promise<Document[]>;
/**
* Returns a find cursor on the collection.
*
* @param ns - The namespace to search on.
* @param filter - The query filter.
* @param options - The query options.
*/
findCursor(ns: string, filter: Filter<Document>, options?: FindOptions): FindCursor;
/**
* Returns explain plan for the provided filter and options on the collection.
*
* @param ns - The namespace to search on.
* @param filter - The query filter.
* @param options - The query options.
* @param executionOptions - The execution options.
*/
explainFind(ns: string, filter: Filter<Document>, options?: FindOptions, executionOptions?: ExplainExecuteOptions): Promise<Document>;
/**
* Find one document and replace it with the replacement.
*
* @param ns - The namespace to search on.
* @param filter - The filter.
* @param replacement - The replacement doc.
* @param options - The query options.
*/
findOneAndReplace(ns: string, filter: Filter<Document>, replacement: Document, options?: FindOneAndReplaceOptions): Promise<Document | null>;
/**
* Find one document and update it with the update operations.
*
* @param ns - The namespace to search on.
* @param filter - The filter.
* @param update - The update operations doc.
* @param options - The query options.
*/
findOneAndUpdate(ns: string, filter: Filter<Document>, update: Document, options?: FindOneAndUpdateOptions): Promise<Document | null>;
/**
* Update one document.
*
* @param ns - The namespace to search on.
* @param filter - The filter used to select the document to update.
* @param update - The update operations to be applied to the document.
* @param options - Optional settings for the command.
*/
updateOne(ns: string, filter: Filter<Document>, update: Document, options?: UpdateOptions): Promise<Document | null>;
/**
* Replace one document.
*
* @param ns - The namespace to search on.
* @param filter - The filter.
* @param replacement - The Document that replaces the matching document.
* @param options - Optional settings for the command.
*/
replaceOne(ns: string, filter: Filter<Document>, replacement: Document, options?: UpdateOptions): Promise<Document | null>;
/**
* Count the number of documents in the collection for the provided filter
* and options.
*
* @param ns - The namespace to search on.
* @param filter - The filter query.
* @param options - The query options.
* @param executionOptions - The execution options.
*/
count(ns: string, filter: Filter<Document>, options?: CountDocumentsOptions, executionOptions?: ExecutionOptions): Promise<number>;
/**
* Sample documents from the collection.
*
* @param ns - The namespace to sample.
* @param args - The sampling options.
* @param options - Driver options (ie. maxTimeMs, session, batchSize ...)
*/
sample(ns: string, args?: {

@@ -136,21 +374,129 @@ query?: Filter<Document>;

}, options?: AggregateOptions, executionOptions?: ExecutionOptions): Promise<Document[]>;
/*** Insert ***/
/**
* Insert a single document into the database.
*
* @param ns - The namespace.
* @param doc - The document to insert.
* @param options - The options.
*/
insertOne(ns: string, doc: Document, options?: InsertOneOptions): Promise<InsertOneResult<Document>>;
/**
* Inserts multiple documents into the collection.
*
* @param ns - The namespace.
* @param docs - The documents to insert.
* @param options - The options.
* @param callback - The callback.
*/
insertMany(ns: string, docs: Document[], options?: BulkWriteOptions): Promise<InsertManyResult<Document>>;
/**
* Performs multiple write operations with controls for order of execution.
*
* @param ns Namespace
* @param operations An array of `bulkWrite()` write operations.
* @param options `bulkWrite()` options
*
* @see {@link https://www.mongodb.com/docs/manual/reference/method/db.collection.bulkWrite/}
*/
bulkWrite(ns: string, operations: AnyBulkWriteOperation[], options: BulkWriteOptions): Promise<BulkWriteResult>;
/*** Delete ***/
/**
* Delete a single document from the collection.
*
* @param ns - The namespace.
* @param filter - The filter.
* @param options - The options.
*/
deleteOne(ns: string, filter: Filter<Document>, options?: DeleteOptions): Promise<DeleteResult>;
/**
* Deletes multiple documents from a collection.
*
* @param ns - The namespace.
* @param filter - The filter.
* @param options - The options.
*/
deleteMany(ns: string, filter: Filter<Document>, options?: DeleteOptions): Promise<DeleteResult>;
/**
* Helper method to check whether or not error is caused by dataService
* operation being aborted
*
* @param error The error to check.
*/
isCancelError(error: any): ReturnType<typeof isCancelError>;
/**
* Create a new data encryption key (DEK) using the ClientEncryption
* helper class.
*/
createDataKey(provider: string, options?: unknown): Promise<Document>;
/**
* Returns current CSFLE status (`enabled` or `disabled`) or `unavailable`
* when no CSFLE configuration was provided to the dataService.
*
* Should default to `unavailable` on unsupported platforms
*/
getCSFLEMode(): 'enabled' | 'disabled' | 'unavailable';
/**
* Change current CSFLE status
*/
setCSFLEEnabled(enabled: boolean): void;
/**
* @see CSFLECollectionTracker.isUpdateAllowed
*/
isUpdateAllowed: CSFLECollectionTracker['isUpdateAllowed'];
/**
* @see CSFLECollectionTracker.knownSchemaForCollection
*/
knownSchemaForCollection: CSFLECollectionTracker['knownSchemaForCollection'];
/**
* Returns a list of configured KMS providers for the current connection
*/
configuredKMSProviders(): string[];
/**
* Register reauthentication handlers with this DataService instance.
*/
addReauthenticationHandler(handler: ReauthenticationHandler): void;
/**
* Return the current state of ConnectionOptions secrets, which may have changed
* since connecting (e.g. OIDC tokens). The `connectionInfoSecretsChanged` event
* is emitted when this value changes.
*/
getUpdatedSecrets(): Promise<Partial<ConnectionOptions>>;
/**
* Runs the update within a transaction, only
* modifying a subset of the documents matched by the filter.
* It returns a list of the changed documents, or a serverError.
*/
previewUpdate(ns: string, filter: Document, update: Document | Document[], executionOptions?: UpdatePreviewExecutionOptions): Promise<UpdatePreview>;
/**
* Updates multiple documents from a collection.
*
* @param ns - The namespace.
* @param filter - The filter.
* @param update - The update.
* @param options - The options.
*/
updateMany(ns: string, filter: Filter<Document>, update: UpdateFilter<Document>, options?: UpdateOptions): Promise<UpdateResult>;
/*** Streams ***/
/**
* List all the named stream processors.
*/
listStreamProcessors(filter?: Document): Promise<StreamProcessor[]>;
/**
* Start the specified stream processor
*
* @param name processor name
*/
startStreamProcessor(name: string): Promise<void>;
/**
* Stop the specified stream processor
*
* @param name processor name
*/
stopStreamProcessor(name: string): Promise<void>;
/**
* Drop the specified stream processor
*
* @param name processor name
*/
dropStreamProcessor(name: string): Promise<void>;

@@ -160,2 +506,3 @@ }

private readonly _connectionOptions;
private readonly _proxyOptions;
private _isConnecting;

@@ -170,2 +517,6 @@ private _mongoClientConnectionOptions?;

private _reauthenticationHandlers;
/**
* Stores the most recent topology description from the server's SDAM events:
* https://github.com/mongodb/specifications/blob/master/source/server-discovery-and-monitoring/server-discovery-and-monitoring-monitoring.rst#events
*/
private _lastSeenTopology;

@@ -175,5 +526,12 @@ private _isWritable;

private _emitter;
/**
* Directly used during data-service runtime, auto sets component and context
* with a connection id
*/
protected _logger: BoundLogger;
/**
* To be passed to the connect-mongo-client
*/
private _unboundLogger?;
constructor(connectionOptions: Readonly<ConnectionOptions>, logger?: DataServiceImplLogger);
constructor(connectionOptions: Readonly<ConnectionOptions>, logger?: DataServiceImplLogger, proxyOptions?: DevtoolsProxyOptions);
get id(): number;

@@ -289,7 +647,39 @@ on(...args: Parameters<DataService['on']>): this;

previewUpdate(ns: string, filter: Document, update: Document | Document[], executionOptions?: UpdatePreviewExecutionOptions): Promise<UpdatePreview>;
/**
* @param databaseName - The name of the database.
* @param collectionName - The name of the collection.
* @param data - The result of the collStats command.
*/
private _buildCollectionStats;
/**
* Get the collection to operate on.
*
* @param ns - The namespace.
*/
private _collection;
/**
* Get the database to operate on.
*
* @param ns - The namespace.
*/
private _database;
/**
* Get the collection name from a namespace.
*
* @param ns - The namespace in database.collection format.
*/
private _collectionName;
/**
* Get the database name from a namespace.
*
* @param ns - The namespace in database.collection format.
*/
private _databaseName;
/**
* Determine if the hello response indicates a writable server.
*
* @param evt - The topology description changed event.
*
* @returns If the server is writable.
*/
private _checkIsWritable;

@@ -296,0 +686,0 @@ private _cleanup;
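
Taken together, the connect() entry point and the DataService declarations above support a straightforward connect / query / disconnect flow. A minimal sketch, assuming the package's default export is the connect function shown earlier and that it resolves to the connected DataService (as connect.js above suggests); the namespace and filter are placeholders:

import connect from 'mongodb-data-service'; // assumed default export

const dataService = await connect({
  connectionOptions: { connectionString: 'mongodb://localhost:27017' },
});
try {
  const docs = await dataService.find('test.users', { active: true }, { limit: 10 });
  console.log(`fetched ${docs.length} documents`);
  console.log('topology type:', dataService.getCurrentTopologyType());
} finally {
  await dataService.disconnect();
}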

@@ -27,3 +27,3 @@ export type IndexInfo = {

properties: ('unique' | 'sparse' | 'partial' | 'ttl' | 'collation')[];
extra: Record<string, string | number | Record<string, any>>;
extra: Record<string, string | number | boolean | Record<string, any>>;
size: IndexSize;

@@ -30,0 +30,0 @@ relativeSize: number;

@@ -48,2 +48,3 @@ "use strict";

}
// Columnstore is before wildcard as it is a special case of wildcard.
if (keyValues.includes('columnstore')) {

@@ -50,0 +51,0 @@ return 'columnstore';

@@ -13,2 +13,3 @@ import connect from './connect';

export type { SearchIndex, SearchIndexStatus, } from './search-index-detail-helper';
export type { InstanceDetails } from './instance-detail-helper';
//# sourceMappingURL=index.d.ts.map

@@ -16,3 +16,9 @@ "use strict";

(0, run_command_1.runCommand)(adminDb, { hostInfo: 1 }).catch(ignoreNotAuthorized({})),
// This command should always pass; if it throws, something is really off.
// This is why it's the only one where we are not ignoring any types of
// errors.
(0, run_command_1.runCommand)(adminDb, { buildInfo: 1 }),
// This command is only here to get data for the logs and telemetry, if it
// failed (e.g., not authorised or not supported) we should just ignore the
// failure
(0, run_command_1.runCommand)(adminDb, {

@@ -38,2 +44,5 @@ getParameter: 1,

isAtlas,
// If a user is connected to Cloud Atlas, it's possible they have
// admin.atlascli with data that mongo-build-info uses to check for
// local Atlas. So we set isLocalAtlas to false in such cases.
isLocalAtlas: isAtlas ? false : isLocalAtlas,

@@ -88,2 +97,10 @@ };

for (const { resource, actions } of filteredPrivileges) {
// Documented resources include roles for dbs/colls, cluster, or in rare cases
// anyResource; additionally there seem to be undocumented ones like
// system_buckets and who knows what else. To make sure we only cover cases
// that we can meaningfully handle here (roles for databases/collections), we
// skip all roles where these are undefined.
//
// See: https://docs.mongodb.com/manual/reference/resource-document/#std-label-resource-document
if (typeof resource.db !== 'undefined' &&

@@ -103,2 +120,3 @@ typeof resource.collection !== 'undefined') {

exports.getPrivilegesByDatabaseAndCollection = getPrivilegesByDatabaseAndCollection;
// Return a list of the databases which have a role matching one of the roles.
function getDatabasesByRoles(authenticatedUserRoles = null, possibleRoles = null) {

@@ -163,2 +181,3 @@ const roles = authenticatedUserRoles ?? [];

version: rawBuildInfo.version ?? '',
// Cover both cases of detecting enterprise module, see SERVER-18099.
isEnterprise: (0, mongodb_build_info_1.isEnterprise)(rawBuildInfo),

@@ -165,0 +184,0 @@ };

/// <reference types="debug" />
import type { LoggerAndTelemetry } from '@mongodb-js/compass-logging';
import type { Logger } from '@mongodb-js/compass-logging';
import { mongoLogId } from '@mongodb-js/compass-logging';

@@ -7,3 +7,3 @@ export declare const debug: import("debug").Debugger;

type MongoLogId = ReturnType<typeof mongoLogId>;
export type DataServiceImplLogger = Pick<LoggerAndTelemetry['log']['unbound'], 'debug' | 'info' | 'warn' | 'error' | 'fatal'>;
export type DataServiceImplLogger = Pick<Logger['log']['unbound'], 'debug' | 'info' | 'warn' | 'error' | 'fatal'>;
export type UnboundDataServiceImplLogger = DataServiceImplLogger & {

@@ -10,0 +10,0 @@ mongoLogId: (id: number) => MongoLogId;

@@ -1,2 +0,1 @@

import type { SshTunnelConfig } from '@mongodb-js/ssh-tunnel';
import type { ConnectionOptions } from './connection-options';

@@ -6,3 +5,2 @@ import { redactConnectionString } from 'mongodb-connection-string-url';

export declare function redactConnectionOptions(options: ConnectionOptions): ConnectionOptions;
export declare function redactSshTunnelOptions<T extends Partial<SshTunnelConfig>>(options: T): T;
//# sourceMappingURL=redact.d.ts.map
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.redactSshTunnelOptions = exports.redactConnectionOptions = exports.redactConnectionString = void 0;
exports.redactConnectionOptions = exports.redactConnectionString = void 0;
const mongodb_connection_string_url_1 = require("mongodb-connection-string-url");

@@ -26,16 +26,2 @@ Object.defineProperty(exports, "redactConnectionString", { enumerable: true, get: function () { return mongodb_connection_string_url_1.redactConnectionString; } });

exports.redactConnectionOptions = redactConnectionOptions;
function redactSshTunnelOptions(options) {
const redacted = { ...options };
if (redacted.password) {
redacted.password = '<redacted>';
}
if (redacted.privateKey) {
redacted.privateKey = '<redacted>';
}
if (redacted.passphrase) {
redacted.passphrase = '<redacted>';
}
return redacted;
}
exports.redactSshTunnelOptions = redactSshTunnelOptions;
//# sourceMappingURL=redact.js.map

@@ -81,4 +81,10 @@ import type { Document, Db, RunCommandOptions, ServerSessionId } from 'mongodb';

storageEngines: string[];
/**
* @unstable
* @deprecated
*/
sysInfo: 'deprecated';
/** @unstable */
allocator: string;
/** @unstable */
buildEnvironment: unknown;

@@ -101,2 +107,5 @@ };

};
/**
* @see {@link https://www.mongodb.com/docs/manual/reference/command/nav-diagnostic/}
*/
interface RunDiagnosticsCommand {

@@ -172,2 +181,3 @@ (db: Db, spec: {

export type CollectionInfo = CollectionInfoNameOnly & {
/** @see https://docs.mongodb.com/manual/reference/method/db.createCollection/#mongodb-method-db.createCollection */
options: Document;

@@ -201,2 +211,5 @@ info: {

};
/**
* @see {@link https://www.mongodb.com/docs/manual/reference/command/nav-administration/}
*/
interface RunAdministrationCommand {

@@ -254,2 +267,5 @@ <Parameters extends Record<string, unknown>>(db: Db, spec: {

}
/**
* @see {@link https://www.mongodb.com/docs/v6.0/reference/command/nav-sessions/}
*/
interface RunSessionCommand {

@@ -262,4 +278,13 @@ (db: Db, spec: {

}
/**
* Runs command against provided database using db.command. Provides a better
* return type based on provided command spec
*
* @param db database to run command against
* @param spec command name in the format { <command name>: 1 }
* @param options command options
* @returns command result
*/
export declare const runCommand: RunCommand;
export {};
//# sourceMappingURL=run-command.d.ts.map

@@ -5,7 +5,29 @@ "use strict";

const mongodb_1 = require("mongodb");
/**
* Runs command against provided database using db.command. Provides a better
* return type based on provided command spec
*
* @param db database to run command against
* @param spec command name in the format { <command name>: 1 }
* @param options command options
* @returns command result
*/
const runCommand = (db, spec, options) => {
/**
* NB: Driver spec says that drivers command method should always run commands
* with primary readPreference disregarding whatever is the readPreference
* of the client/database. We don't want that, and instead want all the
* commands to run with whatever readPreference user has provided during the
* connection
*
* @see https://github.com/mongodb/specifications/blob/master/source/server-selection/server-selection.rst#use-of-read-preferences-with-commands
*/
const readPreference = db.readPreference ?? mongodb_1.ReadPreference.PRIMARY_PREFERRED;
return db.command({ ...spec }, { readPreference, ...options });
return db.command({ ...spec }, { readPreference, ...options }
// It's pretty hard to convince TypeScript that we are doing the right thing
// here due to how vague the driver types are, hence the `any` assertion
// eslint-disable-next-line @typescript-eslint/no-explicit-any
);
};
exports.runCommand = runCommand;
//# sourceMappingURL=run-command.js.map
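
The comment above explains why runCommand re-applies the database's readPreference instead of the driver-spec default of primary for generic command execution. A short sketch of the intended effect, assuming runCommand can be imported from the package's lib/run-command module and that a local server is reachable:

import { MongoClient, ReadPreference } from 'mongodb';
import { runCommand } from 'mongodb-data-service/lib/run-command'; // assumed deep import path

const client = new MongoClient('mongodb://localhost:27017', {
  readPreference: ReadPreference.SECONDARY_PREFERRED,
});
const db = client.db('admin');

// db.command({ buildInfo: 1 }) would be routed with primary read preference per
// the driver spec; runCommand(db, { buildInfo: 1 }) forwards db.readPreference
// instead (secondaryPreferred here), falling back to primaryPreferred when none
// is set.
const buildInfo = await runCommand(db, { buildInfo: 1 });
await client.close();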

@@ -10,3 +10,3 @@ {

"homepage": "https://github.com/mongodb-js/compass",
"version": "0.0.0-next-0589fc49d0b69692f215b29e571cf409f853e7a6",
"version": "0.0.0-next-05a557b9b16a2fc18ebfc264c2a10c25be92525b",
"repository": {

@@ -55,21 +55,21 @@ "type": "git",

"dependencies": {
"@mongodb-js/compass-logging": "0.0.0-next-0589fc49d0b69692f215b29e571cf409f853e7a6",
"@mongodb-js/compass-utils": "0.0.0-next-0589fc49d0b69692f215b29e571cf409f853e7a6",
"@mongodb-js/devtools-connect": "^2.6.0",
"@mongodb-js/ssh-tunnel": "0.0.0-next-0589fc49d0b69692f215b29e571cf409f853e7a6",
"bson": "^6.6.0",
"@mongodb-js/compass-logging": "0.0.0-next-05a557b9b16a2fc18ebfc264c2a10c25be92525b",
"@mongodb-js/compass-utils": "0.0.0-next-05a557b9b16a2fc18ebfc264c2a10c25be92525b",
"@mongodb-js/devtools-connect": "^3.3.1",
"@mongodb-js/devtools-proxy-support": "^0.4.1",
"bson": "^6.8.0",
"lodash": "^4.17.21",
"mongodb": "^6.5.0",
"mongodb-build-info": "^1.7.0",
"mongodb-connection-string-url": "^2.6.0",
"mongodb-ns": "^2.4.0"
"mongodb": "^6.9.0",
"mongodb-build-info": "^1.7.2",
"mongodb-connection-string-url": "^3.0.1",
"mongodb-ns": "^2.4.2"
},
"devDependencies": {
"@mongodb-js/compass-test-server": "0.0.0-next-0589fc49d0b69692f215b29e571cf409f853e7a6",
"@mongodb-js/devtools-docker-test-envs": "^1.3.2",
"@mongodb-js/eslint-config-compass": "0.0.0-next-0589fc49d0b69692f215b29e571cf409f853e7a6",
"@mongodb-js/mocha-config-compass": "0.0.0-next-0589fc49d0b69692f215b29e571cf409f853e7a6",
"@mongodb-js/oidc-plugin": "^0.4.0",
"@mongodb-js/prettier-config-compass": "0.0.0-next-0589fc49d0b69692f215b29e571cf409f853e7a6",
"@mongodb-js/tsconfig-compass": "0.0.0-next-0589fc49d0b69692f215b29e571cf409f853e7a6",
"@mongodb-js/compass-test-server": "0.0.0-next-05a557b9b16a2fc18ebfc264c2a10c25be92525b",
"@mongodb-js/devtools-docker-test-envs": "^1.3.3",
"@mongodb-js/eslint-config-compass": "0.0.0-next-05a557b9b16a2fc18ebfc264c2a10c25be92525b",
"@mongodb-js/mocha-config-compass": "0.0.0-next-05a557b9b16a2fc18ebfc264c2a10c25be92525b",
"@mongodb-js/oidc-plugin": "^1.1.1",
"@mongodb-js/prettier-config-compass": "0.0.0-next-05a557b9b16a2fc18ebfc264c2a10c25be92525b",
"@mongodb-js/tsconfig-compass": "0.0.0-next-05a557b9b16a2fc18ebfc264c2a10c25be92525b",
"@types/lodash": "^4.14.188",

@@ -81,4 +81,5 @@ "@types/whatwg-url": "^8.2.1",

"eslint": "^7.25.0",
"kerberos": "^2.1.0",
"kerberos": "^2.2.0",
"mocha": "^10.2.0",
"mongodb-log-writer": "^1.4.2",
"nyc": "^15.1.0",

@@ -91,5 +92,5 @@ "prettier": "^2.7.1",

"optionalDependencies": {
"mongodb-client-encryption": "^6.0.0"
"mongodb-client-encryption": "^6.1.0"
},
"gitHead": "0589fc49d0b69692f215b29e571cf409f853e7a6"
"gitHead": "05a557b9b16a2fc18ebfc264c2a10c25be92525b"
}

Diffs for the remaining changed files are either not supported by the viewer or too large to display.
