mongodb-memory-server-core - npm package version comparison

Comparing version 8.15.0 to 9.0.0-beta.1

lib/util/getport/index.d.ts


lib/MongoMemoryReplSet.d.ts
/// <reference types="node" />
/// <reference types="node" />
import { EventEmitter } from 'events';

@@ -13,6 +14,6 @@ import { MongoMemoryServer, AutomaticAuth } from './MongoMemoryServer';

/**
* enable auth ("--auth" / "--noauth")
* Enable Authentication
* @default false
*/
auth?: boolean | AutomaticAuth;
auth?: AutomaticAuth;
/**
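
In 9.0 the replica set `auth` option no longer accepts a plain boolean: authentication is configured through an `AutomaticAuth` object and is opt-in via `enable` (the default changes from `auth: false` to `auth: { enable: false }`, as the hunks further down show). A minimal sketch of the new usage, assuming the package's public exports and an ESM/async context:

import { MongoMemoryReplSet } from 'mongodb-memory-server-core';

// 8.x: replSet: { auth: true } or { auth: { disable: false } }
// 9.x: auth must be an AutomaticAuth object; it stays disabled unless "enable" is true
const replSet = await MongoMemoryReplSet.create({
  replSet: {
    count: 3,
    auth: { enable: true },
  },
});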

@@ -196,9 +197,2 @@ * additional command line arguments passed to `mongod`

* Stop the underlying `mongod` instance(s).
* @param runCleanup run "this.cleanup"? (remove dbPath & reset "instanceInfo")
*
* @deprecated replace argument with `Cleanup` interface object
*/
stop(runCleanup: boolean): Promise<boolean>;
/**
* Stop the underlying `mongod` instance(s).
* @param cleanupOptions Set how to run ".cleanup", by default only `{ doCleanup: true }` is used

@@ -209,12 +203,2 @@ */

* Remove the defined dbPath's
* @param force Remove the dbPath even if it is no "tmpDir" (and re-check if tmpDir actually removed it)
* @throws If "state" is not "stopped"
* @throws If "instanceInfo" is not defined
* @throws If an fs error occured
*
* @deprecated replace argument with `Cleanup` interface object
*/
cleanup(force: boolean): Promise<void>;
/**
* Remove the defined dbPath's
* @param options Set how to run a cleanup, by default `{ doCleanup: true }` is used

@@ -221,0 +205,0 @@ * @throws If "state" is not "stopped"
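
The deprecated boolean arguments to `stop()` and `cleanup()` are removed; both methods now take a `Cleanup` options object, with `{ doCleanup: true }` as the default. A short migration sketch, given a running `MongoMemoryReplSet` instance `replSet`:

// 8.x (deprecated): await replSet.stop(true); await replSet.cleanup(true);
// 9.x: pass a Cleanup object
await replSet.stop({ doCleanup: true, force: false }); // stop all instances, then remove temporary dbPaths
// or keep the data on stop and force-remove a custom dbPath later:
await replSet.stop({ doCleanup: false });
await replSet.cleanup({ doCleanup: true, force: true });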

@@ -8,3 +8,3 @@ "use strict";

const utils_1 = require("./util/utils");
const debug_1 = (0, tslib_1.__importDefault)(require("debug"));
const debug_1 = tslib_1.__importDefault(require("debug"));
const mongodb_1 = require("mongodb");

@@ -37,3 +37,2 @@ const MongoInstance_1 = require("./util/MongoInstance");

constructor(opts = {}) {
var _a;
super();

@@ -46,5 +45,5 @@ /**

this._ranCreateAuth = false;
this.binaryOpts = Object.assign({}, opts.binary);
this.instanceOpts = (_a = opts.instanceOpts) !== null && _a !== void 0 ? _a : [];
this.replSetOpts = Object.assign({}, opts.replSet);
this.binaryOpts = { ...opts.binary };
this.instanceOpts = opts.instanceOpts ?? [];
this.replSetOpts = { ...opts.replSet };
}

@@ -63,9 +62,7 @@ /**

*/
static create(opts) {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
log('create: Called .create() method');
const replSet = new this(Object.assign({}, opts));
yield replSet.start();
return replSet;
});
static async create(opts) {
log('create: Called .create() method');
const replSet = new this({ ...opts });
await replSet.start();
return replSet;
}

@@ -111,3 +108,3 @@ /**

const defaults = {
auth: false,
auth: { enable: false },
args: [],

@@ -122,10 +119,6 @@ name: 'testset',

};
this._replSetOpts = Object.assign(Object.assign({}, defaults), val);
this._replSetOpts = { ...defaults, ...val };
(0, utils_1.assertion)(this._replSetOpts.count > 0, new errors_1.ReplsetCountLowError(this._replSetOpts.count));
// setting this for sanity
if (typeof this._replSetOpts.auth === 'boolean') {
this._replSetOpts.auth = { disable: !this._replSetOpts.auth };
}
// do not set default when "disable" is "true" to save execution and memory
if (!this._replSetOpts.auth.disable) {
// only set default is enabled
if (this._replSetOpts.auth.enable) {
this._replSetOpts.auth = (0, utils_1.authDefault)(this._replSetOpts.auth);

@@ -144,5 +137,5 @@ }

(0, utils_1.assertion)(typeof this._replSetOpts.auth === 'object', new errors_1.AuthNotObjectError());
return typeof this._replSetOpts.auth.disable === 'boolean' // if "this._replSetOpts.auth.disable" is defined, use that
? !this._replSetOpts.auth.disable // invert the disable boolean, because "auth" should only be disabled if "disabled = true"
: true; // if "this._replSetOpts.auth.disable" is not defined, default to true because "this._replSetOpts.auth" is defined
return typeof this._replSetOpts.auth.enable === 'boolean' // if "this._replSetOpts.auth.enable" is defined, use that
? this._replSetOpts.auth.enable
: false; // if "this._replSetOpts.auth.enable" is not defined, default to false
}

@@ -208,4 +201,3 @@ /**

.map((s) => {
var _a;
const port = (_a = s.instanceInfo) === null || _a === void 0 ? void 0 : _a.port;
const port = s.instanceInfo?.port;
(0, utils_1.assertion)(!(0, utils_1.isNullOrUndefined)(port), new Error('Instance Port is undefined!'));

@@ -224,25 +216,23 @@ const ip = otherIp || '127.0.0.1';

*/
start() {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
log('start:', this.state);
switch (this.state) {
case MongoMemoryReplSetStates.stopped:
break;
case MongoMemoryReplSetStates.running:
default:
throw new errors_1.StateError([MongoMemoryReplSetStates.stopped], this.state);
async start() {
log('start:', this.state);
switch (this.state) {
case MongoMemoryReplSetStates.stopped:
break;
case MongoMemoryReplSetStates.running:
default:
throw new errors_1.StateError([MongoMemoryReplSetStates.stopped], this.state);
}
this.stateChange(MongoMemoryReplSetStates.init); // this needs to be executed before "setImmediate"
await (0, utils_1.ensureAsync)()
.then(() => this.initAllServers())
.then(() => this._initReplSet())
.catch(async (err) => {
if (!debug_1.default.enabled('MongoMS:MongoMemoryReplSet')) {
console.warn('Starting the MongoMemoryReplSet Instance failed, enable debug log for more information. Error:\n', err);
}
this.stateChange(MongoMemoryReplSetStates.init); // this needs to be executed before "setImmediate"
yield (0, utils_1.ensureAsync)()
.then(() => this.initAllServers())
.then(() => this._initReplSet())
.catch((err) => (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
if (!debug_1.default.enabled('MongoMS:MongoMemoryReplSet')) {
console.warn('Starting the MongoMemoryReplSet Instance failed, enable debug log for more information. Error:\n', err);
}
log('ensureAsync chain threw a Error: ', err);
yield this.stop({ doCleanup: false, force: false }); // still try to close the instance that was spawned, without cleanup for investigation
this.stateChange(MongoMemoryReplSetStates.stopped);
throw err;
}));
log('ensureAsync chain threw a Error: ', err);
await this.stop({ doCleanup: false, force: false }); // still try to close the instance that was spawned, without cleanup for investigation
this.stateChange(MongoMemoryReplSetStates.stopped);
throw err;
});

@@ -253,50 +243,48 @@ }

*/
initAllServers() {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
log('initAllServers');
this.stateChange(MongoMemoryReplSetStates.init);
if (this.servers.length > 0) {
log('initAllServers: lenght of "servers" is higher than 0, starting existing servers');
if (this._ranCreateAuth) {
log('initAllServers: "_ranCreateAuth" is true, re-using auth');
const keyfilepath = (0, path_1.resolve)(yield this.ensureKeyFile(), 'keyfile');
for (const server of this.servers) {
(0, utils_1.assertion)(!(0, utils_1.isNullOrUndefined)(server.instanceInfo), new errors_1.InstanceInfoError('MongoMemoryReplSet.initAllServers'));
(0, utils_1.assertion)(typeof this._replSetOpts.auth === 'object', new errors_1.AuthNotObjectError());
server.instanceInfo.instance.instanceOpts.auth = true;
server.instanceInfo.instance.instanceOpts.keyfileLocation = keyfilepath;
server.instanceInfo.instance.extraConnectionOptions = {
authSource: 'admin',
authMechanism: 'SCRAM-SHA-256',
auth: {
username: this._replSetOpts.auth.customRootName,
password: this._replSetOpts.auth.customRootPwd,
},
};
}
async initAllServers() {
log('initAllServers');
this.stateChange(MongoMemoryReplSetStates.init);
if (this.servers.length > 0) {
log('initAllServers: lenght of "servers" is higher than 0, starting existing servers');
if (this._ranCreateAuth) {
log('initAllServers: "_ranCreateAuth" is true, re-using auth');
const keyfilepath = (0, path_1.resolve)(await this.ensureKeyFile(), 'keyfile');
for (const server of this.servers) {
(0, utils_1.assertion)(!(0, utils_1.isNullOrUndefined)(server.instanceInfo), new errors_1.InstanceInfoError('MongoMemoryReplSet.initAllServers'));
(0, utils_1.assertion)(typeof this._replSetOpts.auth === 'object', new errors_1.AuthNotObjectError());
server.instanceInfo.instance.instanceOpts.auth = true;
server.instanceInfo.instance.instanceOpts.keyfileLocation = keyfilepath;
server.instanceInfo.instance.extraConnectionOptions = {
authSource: 'admin',
authMechanism: 'SCRAM-SHA-256',
auth: {
username: this._replSetOpts.auth.customRootName,
password: this._replSetOpts.auth.customRootPwd,
},
};
}
yield Promise.all(this.servers.map((s) => s.start(true)));
log('initAllServers: finished starting existing instances again');
return;
}
let keyfilePath = undefined;
if (this.enableAuth()) {
keyfilePath = (0, path_1.resolve)(yield this.ensureKeyFile(), 'keyfile');
}
// Any servers defined within `_instanceOpts` should be started first as
// the user could have specified a `dbPath` in which case we would want to perform
// the `replSetInitiate` command against that server.
this._instanceOpts.forEach((opts, index) => {
log(`initAllServers: starting special server "${index + 1}" of "${this._instanceOpts.length}" from instanceOpts (count: ${this.servers.length + 1}):`, opts);
this.servers.push(this._initServer(this.getInstanceOpts(opts, keyfilePath)));
});
while (this.servers.length < this._replSetOpts.count) {
log(`initAllServers: starting extra server "${this.servers.length + 1}" of "${this._replSetOpts.count}" (count: ${this.servers.length + 1})`);
this.servers.push(this._initServer(this.getInstanceOpts(undefined, keyfilePath)));
}
log('initAllServers: waiting for all servers to finish starting');
// ensures all servers are listening for connection
yield Promise.all(this.servers.map((s) => s.start()));
log('initAllServers: finished starting all servers initially');
await Promise.all(this.servers.map((s) => s.start(true)));
log('initAllServers: finished starting existing instances again');
return;
}
let keyfilePath = undefined;
if (this.enableAuth()) {
keyfilePath = (0, path_1.resolve)(await this.ensureKeyFile(), 'keyfile');
}
// Any servers defined within `_instanceOpts` should be started first as
// the user could have specified a `dbPath` in which case we would want to perform
// the `replSetInitiate` command against that server.
this._instanceOpts.forEach((opts, index) => {
log(`initAllServers: starting special server "${index + 1}" of "${this._instanceOpts.length}" from instanceOpts (count: ${this.servers.length + 1}):`, opts);
this.servers.push(this._initServer(this.getInstanceOpts(opts, keyfilePath)));
});
while (this.servers.length < this._replSetOpts.count) {
log(`initAllServers: starting extra server "${this.servers.length + 1}" of "${this._replSetOpts.count}" (count: ${this.servers.length + 1})`);
this.servers.push(this._initServer(this.getInstanceOpts(undefined, keyfilePath)));
}
log('initAllServers: waiting for all servers to finish starting');
// ensures all servers are listening for connection
await Promise.all(this.servers.map((s) => s.start()));
log('initAllServers: finished starting all servers initially');
}

@@ -307,88 +295,90 @@ /**

*/
ensureKeyFile() {
var _a;
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
log('ensureKeyFile');
if ((0, utils_1.isNullOrUndefined)(this._keyfiletmp)) {
this._keyfiletmp = yield (0, utils_1.createTmpDir)('mongo-mem-keyfile-');
}
const keyfilepath = (0, path_1.resolve)(this._keyfiletmp, 'keyfile');
// if path does not exist or have no access, create it (or fail)
if (!(yield (0, utils_1.statPath)(keyfilepath))) {
log('ensureKeyFile: creating Keyfile');
(0, utils_1.assertion)(typeof this._replSetOpts.auth === 'object', new errors_1.AuthNotObjectError());
yield fs_1.promises.writeFile((0, path_1.resolve)(this._keyfiletmp, 'keyfile'), (_a = this._replSetOpts.auth.keyfileContent) !== null && _a !== void 0 ? _a : '0123456789', { mode: 0o700 } // this is because otherwise mongodb errors with "permissions are too open" on unix systems
);
}
return this._keyfiletmp;
});
async ensureKeyFile() {
log('ensureKeyFile');
if ((0, utils_1.isNullOrUndefined)(this._keyfiletmp)) {
this._keyfiletmp = await (0, utils_1.createTmpDir)('mongo-mem-keyfile-');
}
const keyfilepath = (0, path_1.resolve)(this._keyfiletmp, 'keyfile');
// if path does not exist or have no access, create it (or fail)
if (!(await (0, utils_1.statPath)(keyfilepath))) {
log('ensureKeyFile: creating Keyfile');
(0, utils_1.assertion)(typeof this._replSetOpts.auth === 'object', new errors_1.AuthNotObjectError());
await fs_1.promises.writeFile((0, path_1.resolve)(this._keyfiletmp, 'keyfile'), this._replSetOpts.auth.keyfileContent ?? '0123456789', { mode: 0o700 } // this is because otherwise mongodb errors with "permissions are too open" on unix systems
);
}
return this._keyfiletmp;
}
stop(cleanupOptions) {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
log(`stop: called by ${(0, utils_1.isNullOrUndefined)(process.exitCode) ? 'manual' : 'process exit'}`);
/** Default to cleanup temporary, but not custom dbpaths */
let cleanup = { doCleanup: true, force: false };
// handle the old way of setting wheter to cleanup or not
// TODO: for next major release (9.0), this should be removed
if (typeof cleanupOptions === 'boolean') {
cleanup.doCleanup = cleanupOptions;
}
// handle the new way of setting what and how to cleanup
if (typeof cleanupOptions === 'object') {
cleanup = cleanupOptions;
}
if (this._state === MongoMemoryReplSetStates.stopped) {
log('stop: state is "stopped", trying to stop / kill anyway');
}
const successfullyStopped = yield Promise.all(this.servers.map((s) => s.stop({ doCleanup: false, force: false })))
.then(() => {
this.stateChange(MongoMemoryReplSetStates.stopped);
return true;
})
.catch((err) => {
log('stop:', err);
this.stateChange(MongoMemoryReplSetStates.stopped, err);
return false;
});
// return early if the instances failed to stop
if (!successfullyStopped) {
return false;
}
if (cleanup.doCleanup) {
yield this.cleanup(cleanup);
}
/**
* Stop the underlying `mongod` instance(s).
* @param cleanupOptions Set how to run ".cleanup", by default only `{ doCleanup: true }` is used
*/
async stop(cleanupOptions) {
log(`stop: called by ${(0, utils_1.isNullOrUndefined)(process.exitCode) ? 'manual' : 'process exit'}`);
/** Default to cleanup temporary, but not custom dbpaths */
let cleanup = { doCleanup: true, force: false };
// TODO: for next major release (10.0), this should be removed
if (typeof cleanupOptions === 'boolean') {
throw new Error('Unsupported argument type: boolean');
}
// handle the new way of setting what and how to cleanup
if (typeof cleanupOptions === 'object') {
cleanup = cleanupOptions;
}
if (this._state === MongoMemoryReplSetStates.stopped) {
log('stop: state is "stopped", trying to stop / kill anyway');
}
const successfullyStopped = await Promise.all(this.servers.map((s) => s.stop({ doCleanup: false, force: false })))
.then(() => {
this.stateChange(MongoMemoryReplSetStates.stopped);
return true;
})
.catch((err) => {
log('stop:', err);
this.stateChange(MongoMemoryReplSetStates.stopped, err);
return false;
});
// return early if the instances failed to stop
if (!successfullyStopped) {
return false;
}
if (cleanup.doCleanup) {
await this.cleanup(cleanup);
}
return true;
}
cleanup(options) {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
assertionIsMMSRSState(MongoMemoryReplSetStates.stopped, this._state);
log(`cleanup for "${this.servers.length}" servers`);
/** Default to doing cleanup, but not forcing it */
let cleanup = { doCleanup: true, force: false };
// handle the old way of setting wheter to cleanup or not
// TODO: for next major release (9.0), this should be removed
if (typeof options === 'boolean') {
cleanup.force = options;
}
// handle the new way of setting what and how to cleanup
if (typeof options === 'object') {
cleanup = options;
}
log(`cleanup:`, cleanup);
// dont do cleanup, if "doCleanup" is false
if (!cleanup.doCleanup) {
log('cleanup: "doCleanup" is set to false');
return;
}
yield Promise.all(this.servers.map((s) => s.cleanup(cleanup)));
// cleanup the keyfile tmpdir
if (!(0, utils_1.isNullOrUndefined)(this._keyfiletmp)) {
yield (0, utils_1.removeDir)(this._keyfiletmp);
this._keyfiletmp = undefined;
}
this.servers = [];
this._ranCreateAuth = false;
/**
* Remove the defined dbPath's
* @param options Set how to run a cleanup, by default `{ doCleanup: true }` is used
* @throws If "state" is not "stopped"
* @throws If "instanceInfo" is not defined
* @throws If an fs error occured
*/
async cleanup(options) {
assertionIsMMSRSState(MongoMemoryReplSetStates.stopped, this._state);
log(`cleanup for "${this.servers.length}" servers`);
/** Default to doing cleanup, but not forcing it */
let cleanup = { doCleanup: true, force: false };
// TODO: for next major release (10.0), this should be removed
if (typeof options === 'boolean') {
throw new Error('Unsupported argument type: boolean');
}
// handle the new way of setting what and how to cleanup
if (typeof options === 'object') {
cleanup = options;
}
log(`cleanup:`, cleanup);
// dont do cleanup, if "doCleanup" is false
if (!cleanup.doCleanup) {
log('cleanup: "doCleanup" is set to false');
return;
});
}
await Promise.all(this.servers.map((s) => s.cleanup(cleanup)));
// cleanup the keyfile tmpdir
if (!(0, utils_1.isNullOrUndefined)(this._keyfiletmp)) {
await (0, utils_1.removeDir)(this._keyfiletmp);
this._keyfiletmp = undefined;
}
this.servers = [];
this._ranCreateAuth = false;
return;
}

@@ -399,29 +389,27 @@ /**

*/
waitUntilRunning() {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
yield (0, utils_1.ensureAsync)();
log('waitUntilRunning:', this._state);
switch (this._state) {
case MongoMemoryReplSetStates.running:
// just return immediatly if the replSet is already running
return;
case MongoMemoryReplSetStates.init:
// wait for event "running"
yield new Promise((res) => {
// the use of "this" here can be done because "on" either binds "this" or uses an arrow function
function waitRunning(state) {
// this is because other states can be emitted multiple times (like stopped & init for auth creation)
if (state === MongoMemoryReplSetStates.running) {
this.removeListener(MongoMemoryReplSetEvents.stateChange, waitRunning);
res();
}
async waitUntilRunning() {
await (0, utils_1.ensureAsync)();
log('waitUntilRunning:', this._state);
switch (this._state) {
case MongoMemoryReplSetStates.running:
// just return immediatly if the replSet is already running
return;
case MongoMemoryReplSetStates.init:
// wait for event "running"
await new Promise((res) => {
// the use of "this" here can be done because "on" either binds "this" or uses an arrow function
function waitRunning(state) {
// this is because other states can be emitted multiple times (like stopped & init for auth creation)
if (state === MongoMemoryReplSetStates.running) {
this.removeListener(MongoMemoryReplSetEvents.stateChange, waitRunning);
res();
}
this.on(MongoMemoryReplSetEvents.stateChange, waitRunning);
});
return;
case MongoMemoryReplSetStates.stopped:
default:
throw new errors_1.StateError([MongoMemoryReplSetStates.running, MongoMemoryReplSetStates.init], this.state);
}
});
}
this.on(MongoMemoryReplSetEvents.stateChange, waitRunning);
});
return;
case MongoMemoryReplSetStates.stopped:
default:
throw new errors_1.StateError([MongoMemoryReplSetStates.running, MongoMemoryReplSetStates.init], this.state);
}
}

@@ -435,71 +423,74 @@ /**

*/
_initReplSet() {
var _a, _b, _c;
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
log('_initReplSet');
assertionIsMMSRSState(MongoMemoryReplSetStates.init, this._state);
(0, utils_1.assertion)(this.servers.length > 0, new Error('One or more servers are required.'));
const uris = this.servers.map((server) => server.getUri());
const isInMemory = ((_a = this.servers[0].instanceInfo) === null || _a === void 0 ? void 0 : _a.storageEngine) === 'ephemeralForTest';
const extraOptions = this._ranCreateAuth
? (_c = (_b = this.servers[0].instanceInfo) === null || _b === void 0 ? void 0 : _b.instance.extraConnectionOptions) !== null && _c !== void 0 ? _c : {}
: {};
const con = yield mongodb_1.MongoClient.connect(uris[0], Object.assign({
// somehow since mongodb-nodejs 4.0, this option is needed when the server is set to be in a replset
directConnection: true }, extraOptions));
log('_initReplSet: connected');
// try-finally to close connection in any case
async _initReplSet() {
log('_initReplSet');
assertionIsMMSRSState(MongoMemoryReplSetStates.init, this._state);
(0, utils_1.assertion)(this.servers.length > 0, new Error('One or more servers are required.'));
const uris = this.servers.map((server) => server.getUri());
const isInMemory = this.servers[0].instanceInfo?.storageEngine === 'ephemeralForTest';
const extraOptions = this._ranCreateAuth
? this.servers[0].instanceInfo?.instance.extraConnectionOptions ?? {}
: {};
const con = await mongodb_1.MongoClient.connect(uris[0], {
// somehow since mongodb-nodejs 4.0, this option is needed when the server is set to be in a replset
directConnection: true,
...extraOptions,
});
log('_initReplSet: connected');
// try-finally to close connection in any case
try {
const adminDb = con.db('admin');
const members = uris.map((uri, index) => ({
_id: index,
host: (0, utils_1.getHost)(uri),
...(this.servers[index].opts.instance?.replicaMemberConfig || {}), // Overwrite replica member config
}));
const rsConfig = {
_id: this._replSetOpts.name,
members,
writeConcernMajorityJournalDefault: !isInMemory,
settings: {
electionTimeoutMillis: 500,
...this._replSetOpts.configSettings,
},
};
// try-catch because the first "command" can fail
try {
const adminDb = con.db('admin');
const members = uris.map((uri, index) => {
var _a;
return (Object.assign({ _id: index, host: (0, utils_1.getHost)(uri) }, (((_a = this.servers[index].opts.instance) === null || _a === void 0 ? void 0 : _a.replicaMemberConfig) || {})));
});
const rsConfig = {
_id: this._replSetOpts.name,
members,
writeConcernMajorityJournalDefault: !isInMemory,
settings: Object.assign({ electionTimeoutMillis: 500 }, this._replSetOpts.configSettings),
};
// try-catch because the first "command" can fail
try {
log('_initReplSet: trying "replSetInitiate"');
yield adminDb.command({ replSetInitiate: rsConfig });
if (this.enableAuth()) {
log('_initReplSet: "enableAuth" returned "true"');
yield this._waitForPrimary(undefined, '_initReplSet authIsObject');
// find the primary instance to run createAuth on
const primary = this.servers.find((server) => { var _a; return (_a = server.instanceInfo) === null || _a === void 0 ? void 0 : _a.instance.isInstancePrimary; });
(0, utils_1.assertion)(!(0, utils_1.isNullOrUndefined)(primary), new Error('No Primary found'));
// this should be defined at this point, but is checked anyway (thanks to types)
(0, utils_1.assertion)(!(0, utils_1.isNullOrUndefined)(primary.instanceInfo), new errors_1.InstanceInfoError('_initReplSet authIsObject primary'));
yield con.close(); // just ensuring that no timeouts happen or conflicts happen
yield primary.createAuth(primary.instanceInfo);
this._ranCreateAuth = true;
}
log('_initReplSet: trying "replSetInitiate"');
await adminDb.command({ replSetInitiate: rsConfig });
if (this.enableAuth()) {
log('_initReplSet: "enableAuth" returned "true"');
await this._waitForPrimary(undefined, '_initReplSet authIsObject');
// find the primary instance to run createAuth on
const primary = this.servers.find((server) => server.instanceInfo?.instance.isInstancePrimary);
(0, utils_1.assertion)(!(0, utils_1.isNullOrUndefined)(primary), new Error('No Primary found'));
// this should be defined at this point, but is checked anyway (thanks to types)
(0, utils_1.assertion)(!(0, utils_1.isNullOrUndefined)(primary.instanceInfo), new errors_1.InstanceInfoError('_initReplSet authIsObject primary'));
await con.close(); // just ensuring that no timeouts happen or conflicts happen
await primary.createAuth(primary.instanceInfo);
this._ranCreateAuth = true;
}
catch (err) {
if (err instanceof mongodb_1.MongoError && err.errmsg == 'already initialized') {
log(`_initReplSet: "${err.errmsg}": trying to set old config`);
const { config: oldConfig } = yield adminDb.command({ replSetGetConfig: 1 });
log('_initReplSet: got old config:\n', oldConfig);
yield adminDb.command({
replSetReconfig: oldConfig,
force: true,
});
}
else {
throw err;
}
}
catch (err) {
if (err instanceof mongodb_1.MongoError && err.errmsg == 'already initialized') {
log(`_initReplSet: "${err.errmsg}": trying to set old config`);
const { config: oldConfig } = await adminDb.command({ replSetGetConfig: 1 });
log('_initReplSet: got old config:\n', oldConfig);
await adminDb.command({
replSetReconfig: oldConfig,
force: true,
});
}
log('_initReplSet: ReplSet-reconfig finished');
yield this._waitForPrimary(undefined, '_initReplSet beforeRunning');
this.stateChange(MongoMemoryReplSetStates.running);
log('_initReplSet: running');
else {
throw err;
}
}
finally {
log('_initReplSet: finally closing connection');
yield con.close();
}
});
log('_initReplSet: ReplSet-reconfig finished');
await this._waitForPrimary(undefined, '_initReplSet beforeRunning');
this.stateChange(MongoMemoryReplSetStates.running);
log('_initReplSet: running');
}
finally {
log('_initReplSet: finally closing connection');
await con.close();
}
}

@@ -526,32 +517,30 @@ /**

*/
_waitForPrimary(timeout = 1000 * 30, where) {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
log('_waitForPrimary: Waiting for a Primary');
let timeoutId;
// "race" because not all servers will be a primary
yield Promise.race([
...this.servers.map((server) => new Promise((res, rej) => {
const instanceInfo = server.instanceInfo;
// this should be defined at this point, but is checked anyway (thanks to types)
if ((0, utils_1.isNullOrUndefined)(instanceInfo)) {
return rej(new errors_1.InstanceInfoError('_waitForPrimary Primary race'));
}
instanceInfo.instance.once(MongoInstance_1.MongoInstanceEvents.instancePrimary, res);
if (instanceInfo.instance.isInstancePrimary) {
log('_waitForPrimary: found instance being already primary');
res();
}
})),
new Promise((_res, rej) => {
timeoutId = setTimeout(() => {
Promise.all([...this.servers.map((v) => v.stop())]); // this is not chained with "rej", this is here just so things like jest can exit at some point
rej(new errors_1.WaitForPrimaryTimeoutError(timeout, where));
}, timeout);
}),
]);
if (!(0, utils_1.isNullOrUndefined)(timeoutId)) {
clearTimeout(timeoutId);
}
log('_waitForPrimary: detected one primary instance ');
});
async _waitForPrimary(timeout = 1000 * 30, where) {
log('_waitForPrimary: Waiting for a Primary');
let timeoutId;
// "race" because not all servers will be a primary
await Promise.race([
...this.servers.map((server) => new Promise((res, rej) => {
const instanceInfo = server.instanceInfo;
// this should be defined at this point, but is checked anyway (thanks to types)
if ((0, utils_1.isNullOrUndefined)(instanceInfo)) {
return rej(new errors_1.InstanceInfoError('_waitForPrimary Primary race'));
}
instanceInfo.instance.once(MongoInstance_1.MongoInstanceEvents.instancePrimary, res);
if (instanceInfo.instance.isInstancePrimary) {
log('_waitForPrimary: found instance being already primary');
res();
}
})),
new Promise((_res, rej) => {
timeoutId = setTimeout(() => {
Promise.all([...this.servers.map((v) => v.stop())]); // this is not chained with "rej", this is here just so things like jest can exit at some point
rej(new errors_1.WaitForPrimaryTimeoutError(timeout, where));
}, timeout);
}),
]);
if (!(0, utils_1.isNullOrUndefined)(timeoutId)) {
clearTimeout(timeoutId);
}
log('_waitForPrimary: detected one primary instance ');
}

@@ -558,0 +547,0 @@ }

/// <reference types="node" />
/// <reference types="node" />
import { SpawnOptions } from 'child_process';

@@ -7,7 +8,13 @@ import { ManagerAdvanced, Cleanup } from './util/utils';

import { EventEmitter } from 'events';
import { AddUserOptions } from 'mongodb';
/**
* Type with automatic options removed
* "auth" is automatically handled and set via {@link AutomaticAuth}
*/
export type MemoryServerInstanceOpts = Omit<MongoMemoryInstanceOpts, 'auth'>;
/**
* MongoMemoryServer Stored Options
*/
export interface MongoMemoryServerOpts {
instance?: MongoMemoryInstanceOpts;
instance?: MemoryServerInstanceOpts;
binary?: MongoBinaryOpts;

@@ -22,6 +29,6 @@ spawn?: SpawnOptions;

/**
* Disable Automatic User creation
* @default false because when defining this object it usually means that AutomaticAuth is wanted
* Enable Automatic User creation
* @default false
*/
disable?: boolean;
enable?: boolean;
/**
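
For a single server the same inversion applies: `AutomaticAuth.disable` is replaced by `enable` (default `false`), and `instance.auth` disappears from the public options (`MemoryServerInstanceOpts` omits it) because it is now set automatically. A minimal sketch, assuming the package's public exports and the `customRootName` / `customRootPwd` fields visible later in this diff:

import { MongoClient } from 'mongodb';
import { MongoMemoryServer } from 'mongodb-memory-server-core';

const server = await MongoMemoryServer.create({
  auth: { enable: true, customRootName: 'root', customRootPwd: 'rootpwd' },
});

// connect as the automatically created root user
const client = await MongoClient.connect(server.getUri(), {
  auth: { username: 'root', password: 'rootpwd' },
  authSource: 'admin',
});
await client.close();
await server.stop();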

@@ -94,3 +101,3 @@ * Extra Users to create besides the root user

*/
export declare type UserRoles = 'read' | 'readWrite' | 'dbAdmin' | 'dbOwner' | 'userAdmin' | 'clusterAdmin' | 'clusterManager' | 'clusterMonitor' | 'hostManager' | 'backup' | 'restore' | 'readAnyDatabase' | 'readWriteAnyDatabase' | 'userAdminAnyDatabase' | 'dbAdminAnyDatabase' | 'root' | string;
export type UserRoles = 'read' | 'readWrite' | 'dbAdmin' | 'dbOwner' | 'userAdmin' | 'clusterAdmin' | 'clusterManager' | 'clusterMonitor' | 'hostManager' | 'backup' | 'restore' | 'readAnyDatabase' | 'readWriteAnyDatabase' | 'userAdminAnyDatabase' | 'dbAdminAnyDatabase' | 'root' | string;
/**

@@ -122,6 +129,3 @@ * Interface options for "db.createUser" (used for this package)

*/
roles: ({
role: UserRoles;
db: string;
} | UserRoles)[];
roles: AddUserOptions['roles'];
/**
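
Since `roles` now reuses the mongodb driver's `AddUserOptions['roles']` type, extra users can declare either plain role names or `{ role, db }` pairs. A hedged sketch of an `extraUsers` entry, using the field names that appear in `createAuth` further down (`createUser`, `pwd`, `database`, `roles`):

const serverWithUsers = await MongoMemoryServer.create({
  auth: {
    enable: true,
    extraUsers: [
      {
        createUser: 'appUser',
        pwd: 'appPwd',
        database: 'appDb', // defaults to "admin" when omitted
        roles: [{ role: 'readWrite', db: 'appDb' }, 'clusterMonitor'],
      },
    ],
  },
});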

@@ -229,9 +233,2 @@ * Specify the specific SCRAM mechanism or mechanisms for creating SCRAM user credentials.

* Stop the current In-Memory Instance
* @param runCleanup run "this.cleanup"? (remove dbPath & reset "instanceInfo")
*
* @deprecated replace argument with `Cleanup` interface object
*/
stop(runCleanup: boolean): Promise<boolean>;
/**
* Stop the current In-Memory Instance
* @param cleanupOptions Set how to run ".cleanup", by default only `{ doCleanup: true }` is used

@@ -242,12 +239,2 @@ */

* Remove the defined dbPath
* @param force Remove the dbPath even if it is no "tmpDir" (and re-check if tmpDir actually removed it)
* @throws If "state" is not "stopped"
* @throws If "instanceInfo" is not defined
* @throws If an fs error occured
*
* @deprecated replace argument with `Cleanup` interface object
*/
cleanup(force: boolean): Promise<void>;
/**
* Remove the defined dbPath
* @param options Set how to run a cleanup, by default `{ doCleanup: true }` is used

@@ -254,0 +241,0 @@ * @throws If "state" is not "stopped"
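
As with the replica set class, the boolean overloads of `stop()` and `cleanup()` are gone; the 9.0 implementation further down now throws `Unsupported argument type: boolean` instead of interpreting the flag. A short migration sketch for `MongoMemoryServer`:

// 8.x
await server.stop(false);    // stop, but skip cleanup
await server.cleanup(true);  // force-remove the dbPath

// 9.x equivalents
await server.stop({ doCleanup: false });
await server.cleanup({ doCleanup: true, force: true });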

@@ -5,6 +5,6 @@ "use strict";

const tslib_1 = require("tslib");
const get_port_1 = (0, tslib_1.__importDefault)(require("get-port"));
const getport_1 = require("./util/getport");
const utils_1 = require("./util/utils");
const MongoInstance_1 = require("./util/MongoInstance");
const debug_1 = (0, tslib_1.__importDefault)(require("debug"));
const debug_1 = tslib_1.__importDefault(require("debug"));
const events_1 = require("events");

@@ -14,3 +14,3 @@ const fs_1 = require("fs");

const errors_1 = require("./util/errors");
const os = (0, tslib_1.__importStar)(require("os"));
const os = tslib_1.__importStar(require("os"));
const log = (0, debug_1.default)('MongoMS:MongoMemoryServer');

@@ -45,5 +45,9 @@ /**

this._state = MongoMemoryServerStates.new;
this.opts = Object.assign({}, opts);
// TODO: consider changing this to not be set if "instance.auth" is false in 9.0
if (!(0, utils_1.isNullOrUndefined)(this.opts.auth)) {
this.opts = { ...opts };
// instance option "auth" will be automatically set and handled via AutomaticAuth
if ('auth' in (this.opts.instance ?? {})) {
log('opts.instance.auth was defined, but will be set automatically, ignoring');
delete this.opts.instance?.auth;
}
if (this.opts.auth?.enable === true) {
// assign defaults

@@ -57,9 +61,7 @@ this.auth = (0, utils_1.authDefault)(this.opts.auth);

*/
static create(opts) {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
log('create: Called .create() method');
const instance = new MongoMemoryServer(Object.assign({}, opts));
yield instance.start();
return instance;
});
static async create(opts) {
log('create: Called .create() method');
const instance = new MongoMemoryServer({ ...opts });
await instance.start();
return instance;
}

@@ -71,36 +73,32 @@ /**

*/
start(forceSamePort = false) {
var _a;
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
this.debug('start: Called .start() method');
switch (this._state) {
case MongoMemoryServerStates.new:
case MongoMemoryServerStates.stopped:
break;
case MongoMemoryServerStates.running:
case MongoMemoryServerStates.starting:
default:
throw new errors_1.StateError([MongoMemoryServerStates.new, MongoMemoryServerStates.stopped], this.state);
async start(forceSamePort = false) {
this.debug('start: Called .start() method');
switch (this._state) {
case MongoMemoryServerStates.new:
case MongoMemoryServerStates.stopped:
break;
case MongoMemoryServerStates.running:
case MongoMemoryServerStates.starting:
default:
throw new errors_1.StateError([MongoMemoryServerStates.new, MongoMemoryServerStates.stopped], this.state);
}
(0, utils_1.assertion)((0, utils_1.isNullOrUndefined)(this._instanceInfo?.instance.mongodProcess), new Error('Cannot start because "instance.mongodProcess" is already defined!'));
this.stateChange(MongoMemoryServerStates.starting);
await this._startUpInstance(forceSamePort).catch(async (err) => {
// add error information on macos-arm because "spawn Unknown system error -86" does not say much
if (err instanceof Error && err.message?.includes('spawn Unknown system error -86')) {
if (os.platform() === 'darwin' && os.arch() === 'arm64') {
err.message += err.message += ', Is Rosetta Installed and Setup correctly?';
}
}
(0, utils_1.assertion)((0, utils_1.isNullOrUndefined)((_a = this._instanceInfo) === null || _a === void 0 ? void 0 : _a.instance.mongodProcess), new Error('Cannot start because "instance.mongodProcess" is already defined!'));
this.stateChange(MongoMemoryServerStates.starting);
yield this._startUpInstance(forceSamePort).catch((err) => (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
var _b;
// add error information on macos-arm because "spawn Unknown system error -86" does not say much
if (err instanceof Error && ((_b = err.message) === null || _b === void 0 ? void 0 : _b.includes('spawn Unknown system error -86'))) {
if (os.platform() === 'darwin' && os.arch() === 'arm64') {
err.message += err.message += ', Is Rosetta Installed and Setup correctly?';
}
}
if (!debug_1.default.enabled('MongoMS:MongoMemoryServer')) {
console.warn('Starting the MongoMemoryServer Instance failed, enable debug log for more information. Error:\n', err);
}
this.debug('_startUpInstance threw a Error: ', err);
yield this.stop({ doCleanup: false, force: false }); // still try to close the instance that was spawned, without cleanup for investigation
this.stateChange(MongoMemoryServerStates.stopped);
throw err;
}));
this.stateChange(MongoMemoryServerStates.running);
this.debug('start: Instance fully Started');
if (!debug_1.default.enabled('MongoMS:MongoMemoryServer')) {
console.warn('Starting the MongoMemoryServer Instance failed, enable debug log for more information. Error:\n', err);
}
this.debug('_startUpInstance threw a Error: ', err);
await this.stop({ doCleanup: false, force: false }); // still try to close the instance that was spawned, without cleanup for investigation
this.stateChange(MongoMemoryServerStates.stopped);
throw err;
});
this.stateChange(MongoMemoryServerStates.running);
this.debug('start: Instance fully Started');
}

@@ -120,4 +118,3 @@ /**

debug(msg, ...extra) {
var _a, _b;
const port = (_b = (_a = this._instanceInfo) === null || _a === void 0 ? void 0 : _a.port) !== null && _b !== void 0 ? _b : 'unknown';
const port = this._instanceInfo?.port ?? 'unknown';
log(`Mongo[${port}]: ${msg}`, ...extra);

@@ -129,11 +126,9 @@ }

*/
getNewPort(port) {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
const newPort = yield (0, get_port_1.default)({ port });
// only log this message if a custom port was provided
if (port != newPort && typeof port === 'number') {
this.debug(`getNewPort: starting with port "${newPort}", since "${port}" was locked`);
}
return newPort;
});
async getNewPort(port) {
const newPort = await (0, getport_1.getFreePort)(port);
// only log this message if a custom port was provided
if (port != newPort && typeof port === 'number') {
this.debug(`getNewPort: starting with port "${newPort}", since "${port}" was locked`);
}
return newPort;
}

@@ -143,62 +138,62 @@ /**

*/
getStartOptions(forceSamePort = false) {
var _a, _b, _c;
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
this.debug(`getStartOptions: forceSamePort: ${forceSamePort}`);
/** Shortcut to this.opts.instance */
const instOpts = (_a = this.opts.instance) !== null && _a !== void 0 ? _a : {};
/**
* This variable is used for determining if "createAuth" should be run
*/
let isNew = true;
// use pre-defined port if available, otherwise generate a new port
let port = typeof instOpts.port === 'number' ? instOpts.port : undefined;
// if "forceSamePort" is not true, and get a available port
if (!forceSamePort || (0, utils_1.isNullOrUndefined)(port)) {
port = yield this.getNewPort(port);
async getStartOptions(forceSamePort = false) {
this.debug(`getStartOptions: forceSamePort: ${forceSamePort}`);
/** Shortcut to this.opts.instance */
const instOpts = this.opts.instance ?? {};
/**
* This variable is used for determining if "createAuth" should be run
*/
let isNew = true;
// use pre-defined port if available, otherwise generate a new port
let port = typeof instOpts.port === 'number' ? instOpts.port : undefined;
// if "forceSamePort" is not true, and get a available port
if (!forceSamePort || (0, utils_1.isNullOrUndefined)(port)) {
port = await this.getNewPort(port);
}
// consider directly using "this.opts.instance", to pass through all options, even if not defined in "StartupInstanceData"
const data = {
port: port,
dbName: (0, utils_1.generateDbName)(instOpts.dbName),
ip: instOpts.ip ?? '127.0.0.1',
storageEngine: instOpts.storageEngine ?? 'ephemeralForTest',
replSet: instOpts.replSet,
dbPath: instOpts.dbPath,
tmpDir: undefined,
keyfileLocation: instOpts.keyfileLocation,
launchTimeout: instOpts.launchTimeout,
};
if ((0, utils_1.isNullOrUndefined)(this._instanceInfo)) {
// create a tmpDir instance if no "dbPath" is given
if (!data.dbPath) {
data.tmpDir = await (0, utils_1.createTmpDir)('mongo-mem-');
data.dbPath = data.tmpDir;
isNew = true; // just to ensure "isNew" is "true" because a new temporary directory got created
}
// consider directly using "this.opts.instance", to pass through all options, even if not defined in "StartupInstanceData"
const data = {
port: port,
dbName: (0, utils_1.generateDbName)(instOpts.dbName),
ip: (_b = instOpts.ip) !== null && _b !== void 0 ? _b : '127.0.0.1',
storageEngine: (_c = instOpts.storageEngine) !== null && _c !== void 0 ? _c : 'ephemeralForTest',
replSet: instOpts.replSet,
dbPath: instOpts.dbPath,
tmpDir: undefined,
keyfileLocation: instOpts.keyfileLocation,
launchTimeout: instOpts.launchTimeout,
};
if ((0, utils_1.isNullOrUndefined)(this._instanceInfo)) {
// create a tmpDir instance if no "dbPath" is given
if (!data.dbPath) {
data.tmpDir = yield (0, utils_1.createTmpDir)('mongo-mem-');
data.dbPath = data.tmpDir;
isNew = true; // just to ensure "isNew" is "true" because a new temporary directory got created
}
else {
this.debug(`getStartOptions: Checking if "${data.dbPath}}" (no new tmpDir) already has data`);
const files = yield fs_1.promises.readdir(data.dbPath);
isNew = files.length === 0; // if there are no files in the directory, assume that the database is new
}
}
else {
isNew = false;
this.debug(`getStartOptions: Checking if "${data.dbPath}}" (no new tmpDir) already has data`);
const files = await fs_1.promises.readdir(data.dbPath);
isNew = files.length === 0; // if there are no files in the directory, assume that the database is new
}
const enableAuth = (typeof instOpts.auth === 'boolean' ? instOpts.auth : true) && // check if auth is even meant to be enabled
this.authObjectEnable();
const createAuth = enableAuth && // re-use all the checks from "enableAuth"
!(0, utils_1.isNullOrUndefined)(this.auth) && // needs to be re-checked because typescript complains
(this.auth.force || isNew) && // check that either "isNew" or "this.auth.force" is "true"
!instOpts.replSet; // dont run "createAuth" when its a replset, it will be run by the replset controller
return {
data: data,
createAuth: createAuth,
mongodOptions: {
instance: Object.assign(Object.assign({}, data), { args: instOpts.args, auth: enableAuth }),
binary: this.opts.binary,
spawn: this.opts.spawn,
}
else {
isNew = false;
}
const enableAuth = this.authObjectEnable();
const createAuth = enableAuth && // re-use all the checks from "enableAuth"
!(0, utils_1.isNullOrUndefined)(this.auth) && // needs to be re-checked because typescript complains
(this.auth.force || isNew) && // check that either "isNew" or "this.auth.force" is "true"
!instOpts.replSet; // dont run "createAuth" when its a replset, it will be run by the replset controller
return {
data: data,
createAuth: createAuth,
mongodOptions: {
instance: {
...data,
args: instOpts.args,
auth: enableAuth,
},
};
});
binary: this.opts.binary,
spawn: this.opts.spawn,
},
};
}

@@ -210,126 +205,124 @@ /**

*/
_startUpInstance(forceSamePort = false) {
var _a, _b;
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
this.debug('_startUpInstance: Called MongoMemoryServer._startUpInstance() method');
if (!(0, utils_1.isNullOrUndefined)(this._instanceInfo)) {
this.debug('_startUpInstance: "instanceInfo" already defined, reusing instance');
if (!forceSamePort) {
const newPort = yield this.getNewPort(this._instanceInfo.port);
this._instanceInfo.instance.instanceOpts.port = newPort;
this._instanceInfo.port = newPort;
}
yield this._instanceInfo.instance.start();
return;
async _startUpInstance(forceSamePort = false) {
this.debug('_startUpInstance: Called MongoMemoryServer._startUpInstance() method');
if (!(0, utils_1.isNullOrUndefined)(this._instanceInfo)) {
this.debug('_startUpInstance: "instanceInfo" already defined, reusing instance');
if (!forceSamePort) {
const newPort = await this.getNewPort(this._instanceInfo.port);
this._instanceInfo.instance.instanceOpts.port = newPort;
this._instanceInfo.port = newPort;
}
const { mongodOptions, createAuth, data } = yield this.getStartOptions(forceSamePort);
this.debug(`_startUpInstance: Creating new MongoDB instance with options:`, mongodOptions);
const instance = yield MongoInstance_1.MongoInstance.create(mongodOptions);
this.debug(`_startUpInstance: Instance Started, createAuth: "${createAuth}"`);
this._instanceInfo = Object.assign(Object.assign({}, data), { dbPath: data.dbPath, // because otherwise the types would be incompatible
instance });
// always set the "extraConnectionOptions" when "auth" is enabled, regardless of if "createAuth" gets run
if (this.authObjectEnable() &&
((_a = mongodOptions.instance) === null || _a === void 0 ? void 0 : _a.auth) === true &&
!(0, utils_1.isNullOrUndefined)(this.auth) // extra check again for typescript, because it cant reuse checks from "enableAuth" yet
) {
instance.extraConnectionOptions = {
authSource: 'admin',
authMechanism: 'SCRAM-SHA-256',
auth: {
username: this.auth.customRootName,
password: this.auth.customRootPwd,
},
};
}
// "isNullOrUndefined" because otherwise typescript complains about "this.auth" possibly being not defined
if (!(0, utils_1.isNullOrUndefined)(this.auth) && createAuth) {
this.debug(`_startUpInstance: Running "createAuth" (force: "${this.auth.force}")`);
yield this.createAuth(data);
}
else {
// extra "if" to log when "disable" is set to "true"
if ((_b = this.opts.auth) === null || _b === void 0 ? void 0 : _b.disable) {
this.debug('_startUpInstance: AutomaticAuth.disable is set to "true" skipping "createAuth"');
}
}
});
await this._instanceInfo.instance.start();
return;
}
const { mongodOptions, createAuth, data } = await this.getStartOptions(forceSamePort);
this.debug(`_startUpInstance: Creating new MongoDB instance with options:`, mongodOptions);
const instance = await MongoInstance_1.MongoInstance.create(mongodOptions);
this.debug(`_startUpInstance: Instance Started, createAuth: "${createAuth}"`);
this._instanceInfo = {
...data,
dbPath: data.dbPath,
instance,
};
// always set the "extraConnectionOptions" when "auth" is enabled, regardless of if "createAuth" gets run
if (this.authObjectEnable() &&
mongodOptions.instance?.auth === true &&
!(0, utils_1.isNullOrUndefined)(this.auth) // extra check again for typescript, because it cant reuse checks from "enableAuth" yet
) {
instance.extraConnectionOptions = {
authSource: 'admin',
authMechanism: 'SCRAM-SHA-256',
auth: {
username: this.auth.customRootName,
password: this.auth.customRootPwd,
},
};
}
// "isNullOrUndefined" because otherwise typescript complains about "this.auth" possibly being not defined
if (!(0, utils_1.isNullOrUndefined)(this.auth) && createAuth) {
this.debug(`_startUpInstance: Running "createAuth" (force: "${this.auth.force}")`);
await this.createAuth(data);
}
}
stop(cleanupOptions) {
var _a, _b;
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
this.debug('stop: Called .stop() method');
/** Default to cleanup temporary, but not custom dbpaths */
let cleanup = { doCleanup: true, force: false };
// handle the old way of setting wheter to cleanup or not
// TODO: for next major release (9.0), this should be removed
if (typeof cleanupOptions === 'boolean') {
cleanup.doCleanup = cleanupOptions;
}
// handle the new way of setting what and how to cleanup
if (typeof cleanupOptions === 'object') {
cleanup = cleanupOptions;
}
// just return "true" if there was never an instance
if ((0, utils_1.isNullOrUndefined)(this._instanceInfo)) {
this.debug('stop: "instanceInfo" is not defined (never ran?)');
return false;
}
if (this._state === MongoMemoryServerStates.stopped) {
this.debug('stop: state is "stopped", trying to stop / kill anyway');
}
this.debug(`stop: Stopping MongoDB server on port ${this._instanceInfo.port} with pid ${(_b = (_a = this._instanceInfo.instance) === null || _a === void 0 ? void 0 : _a.mongodProcess) === null || _b === void 0 ? void 0 : _b.pid}` // "undefined" would say more than ""
);
yield this._instanceInfo.instance.stop();
this.stateChange(MongoMemoryServerStates.stopped);
if (cleanup.doCleanup) {
yield this.cleanup(cleanup);
}
return true;
});
/**
* Stop the current In-Memory Instance
* @param cleanupOptions Set how to run ".cleanup", by default only `{ doCleanup: true }` is used
*/
async stop(cleanupOptions) {
this.debug('stop: Called .stop() method');
/** Default to cleanup temporary, but not custom dbpaths */
let cleanup = { doCleanup: true, force: false };
// TODO: for next major release (10.0), this should be removed
if (typeof cleanupOptions === 'boolean') {
throw new Error('Unsupported argument type: boolean');
}
// handle the new way of setting what and how to cleanup
if (typeof cleanupOptions === 'object') {
cleanup = cleanupOptions;
}
// just return "true" if there was never an instance
if ((0, utils_1.isNullOrUndefined)(this._instanceInfo)) {
this.debug('stop: "instanceInfo" is not defined (never ran?)');
return false;
}
if (this._state === MongoMemoryServerStates.stopped) {
this.debug('stop: state is "stopped", trying to stop / kill anyway');
}
this.debug(`stop: Stopping MongoDB server on port ${this._instanceInfo.port} with pid ${this._instanceInfo.instance?.mongodProcess?.pid}` // "undefined" would say more than ""
);
await this._instanceInfo.instance.stop();
this.stateChange(MongoMemoryServerStates.stopped);
if (cleanup.doCleanup) {
await this.cleanup(cleanup);
}
return true;
}
cleanup(options) {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
assertionIsMMSState(MongoMemoryServerStates.stopped, this.state);
/** Default to doing cleanup, but not forcing it */
let cleanup = { doCleanup: true, force: false };
// handle the old way of setting wheter to cleanup or not
// TODO: for next major release (9.0), this should be removed
if (typeof options === 'boolean') {
cleanup.force = options;
/**
* Remove the defined dbPath
* @param options Set how to run a cleanup, by default `{ doCleanup: true }` is used
* @throws If "state" is not "stopped"
* @throws If "instanceInfo" is not defined
* @throws If an fs error occured
*/
async cleanup(options) {
assertionIsMMSState(MongoMemoryServerStates.stopped, this.state);
/** Default to doing cleanup, but not forcing it */
let cleanup = { doCleanup: true, force: false };
// TODO: for next major release (10.0), this should be removed
if (typeof options === 'boolean') {
throw new Error('Unsupported argument type: boolean');
}
// handle the new way of setting what and how to cleanup
if (typeof options === 'object') {
cleanup = options;
}
this.debug(`cleanup:`, cleanup);
// dont do cleanup, if "doCleanup" is false
if (!cleanup.doCleanup) {
this.debug('cleanup: "doCleanup" is set to false');
return;
}
if ((0, utils_1.isNullOrUndefined)(this._instanceInfo)) {
this.debug('cleanup: "instanceInfo" is undefined');
return;
}
(0, utils_1.assertion)((0, utils_1.isNullOrUndefined)(this._instanceInfo.instance.mongodProcess), new Error('Cannot cleanup because "instance.mongodProcess" is still defined'));
const tmpDir = this._instanceInfo.tmpDir;
if (!(0, utils_1.isNullOrUndefined)(tmpDir)) {
this.debug(`cleanup: removing tmpDir at ${tmpDir}`);
await (0, utils_1.removeDir)(tmpDir);
}
if (cleanup.force) {
const dbPath = this._instanceInfo.dbPath;
const res = await (0, utils_1.statPath)(dbPath);
if ((0, utils_1.isNullOrUndefined)(res)) {
this.debug(`cleanup: force is true, but path "${dbPath}" dosnt exist anymore`);
}
// handle the new way of setting what and how to cleanup
if (typeof options === 'object') {
cleanup = options;
else {
(0, utils_1.assertion)(res.isDirectory(), new Error('Defined dbPath is not a directory'));
await (0, utils_1.removeDir)(dbPath);
}
this.debug(`cleanup:`, cleanup);
// dont do cleanup, if "doCleanup" is false
if (!cleanup.doCleanup) {
this.debug('cleanup: "doCleanup" is set to false');
return;
}
if ((0, utils_1.isNullOrUndefined)(this._instanceInfo)) {
this.debug('cleanup: "instanceInfo" is undefined');
return;
}
(0, utils_1.assertion)((0, utils_1.isNullOrUndefined)(this._instanceInfo.instance.mongodProcess), new Error('Cannot cleanup because "instance.mongodProcess" is still defined'));
const tmpDir = this._instanceInfo.tmpDir;
if (!(0, utils_1.isNullOrUndefined)(tmpDir)) {
this.debug(`cleanup: removing tmpDir at ${tmpDir}`);
yield (0, utils_1.removeDir)(tmpDir);
}
if (cleanup.force) {
const dbPath = this._instanceInfo.dbPath;
const res = yield (0, utils_1.statPath)(dbPath);
if ((0, utils_1.isNullOrUndefined)(res)) {
this.debug(`cleanup: force is true, but path "${dbPath}" dosnt exist anymore`);
}
else {
(0, utils_1.assertion)(res.isDirectory(), new Error('Defined dbPath is not a directory'));
yield (0, utils_1.removeDir)(dbPath);
}
}
this.stateChange(MongoMemoryServerStates.new); // reset "state" to new, because the dbPath got removed
this._instanceInfo = undefined;
});
}
this.stateChange(MongoMemoryServerStates.new); // reset "state" to new, because the dbPath got removed
this._instanceInfo = undefined;
}

@@ -352,41 +345,37 @@ /**

*/
ensureInstance() {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
this.debug('ensureInstance: Called .ensureInstance() method');
switch (this._state) {
case MongoMemoryServerStates.running:
if (this._instanceInfo) {
return this._instanceInfo;
async ensureInstance() {
this.debug('ensureInstance: Called .ensureInstance() method');
switch (this._state) {
case MongoMemoryServerStates.running:
if (this._instanceInfo) {
return this._instanceInfo;
}
throw new errors_1.InstanceInfoError('MongoMemoryServer.ensureInstance (state: running)');
case MongoMemoryServerStates.new:
case MongoMemoryServerStates.stopped:
break;
case MongoMemoryServerStates.starting:
return new Promise((res, rej) => this.once(MongoMemoryServerEvents.stateChange, (state) => {
if (state != MongoMemoryServerStates.running) {
rej(new Error(`"ensureInstance" waited for "running" but got a different state: "${state}"`));
return;
}
throw new errors_1.EnsureInstanceError(true);
case MongoMemoryServerStates.new:
case MongoMemoryServerStates.stopped:
break;
case MongoMemoryServerStates.starting:
return new Promise((res, rej) => this.once(MongoMemoryServerEvents.stateChange, (state) => {
if (state != MongoMemoryServerStates.running) {
rej(new Error(`"ensureInstance" waited for "running" but got a different state: "${state}"`));
return;
}
// this assertion is mainly for types (typescript otherwise would complain that "_instanceInfo" might be "undefined")
(0, utils_1.assertion)(!(0, utils_1.isNullOrUndefined)(this._instanceInfo), new Error('InstanceInfo is undefined!'));
res(this._instanceInfo);
}));
default:
throw new errors_1.StateError([
MongoMemoryServerStates.running,
MongoMemoryServerStates.new,
MongoMemoryServerStates.stopped,
MongoMemoryServerStates.starting,
], this.state);
}
this.debug('ensureInstance: no running instance, calling "start()" command');
yield this.start();
this.debug('ensureInstance: "start()" command was succesfully resolved');
// check again for 1. Typescript-type reasons and 2. if .start failed to throw an error
if (!this._instanceInfo) {
throw new errors_1.EnsureInstanceError(false);
}
return this._instanceInfo;
});
// this assertion is mainly for types (typescript otherwise would complain that "_instanceInfo" might be "undefined")
(0, utils_1.assertion)(!(0, utils_1.isNullOrUndefined)(this._instanceInfo), new Error('InstanceInfo is undefined!'));
res(this._instanceInfo);
}));
default:
throw new errors_1.StateError([
MongoMemoryServerStates.running,
MongoMemoryServerStates.new,
MongoMemoryServerStates.stopped,
MongoMemoryServerStates.starting,
], this.state);
}
this.debug('ensureInstance: no running instance, calling "start()" command');
await this.start();
this.debug('ensureInstance: "start()" command was succesfully resolved');
// check again for 1. Typescript-type reasons and 2. if .start failed to throw an error
(0, utils_1.assertion)(!!this._instanceInfo, new errors_1.InstanceInfoError('MongoMemoryServer.ensureInstance (after starting)'));
return this._instanceInfo;
}

@@ -420,67 +409,68 @@ /**

*/
createAuth(data) {
var _a, _b, _c, _d;
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
(0, utils_1.assertion)(!(0, utils_1.isNullOrUndefined)(this.auth), new Error('"createAuth" got called, but "this.auth" is undefined!'));
assertionInstanceInfo(this._instanceInfo);
this.debug('createAuth: options:', this.auth);
let con = yield mongodb_1.MongoClient.connect((0, utils_1.uriTemplate)(data.ip, data.port, 'admin'));
try {
let db = con.db('admin'); // just to ensure it is actually the "admin" database AND to have the "Db" data
// Create the root user
this.debug(`createAuth: Creating Root user, name: "${this.auth.customRootName}"`);
yield db.command({
createUser: this.auth.customRootName,
pwd: this.auth.customRootPwd,
mechanisms: ['SCRAM-SHA-256'],
customData: {
createdBy: 'mongodb-memory-server',
as: 'ROOTUSER',
},
roles: ['root'],
// "writeConcern" is needced, otherwise replset servers might fail with "auth failed: such user does not exist"
writeConcern: {
w: 'majority',
},
async createAuth(data) {
(0, utils_1.assertion)(!(0, utils_1.isNullOrUndefined)(this.auth), new Error('"createAuth" got called, but "this.auth" is undefined!'));
assertionInstanceInfo(this._instanceInfo);
this.debug('createAuth: options:', this.auth);
let con = await mongodb_1.MongoClient.connect((0, utils_1.uriTemplate)(data.ip, data.port, 'admin'));
try {
let db = con.db('admin'); // just to ensure it is actually the "admin" database AND to have the "Db" data
// Create the root user
this.debug(`createAuth: Creating Root user, name: "${this.auth.customRootName}"`);
await db.command({
createUser: this.auth.customRootName,
pwd: this.auth.customRootPwd,
mechanisms: ['SCRAM-SHA-256'],
customData: {
createdBy: 'mongodb-memory-server',
as: 'ROOTUSER',
},
roles: ['root'],
// "writeConcern" is needced, otherwise replset servers might fail with "auth failed: such user does not exist"
writeConcern: {
w: 'majority',
},
});
if (this.auth.extraUsers.length > 0) {
this.debug(`createAuth: Creating "${this.auth.extraUsers.length}" Custom Users`);
this.auth.extraUsers.sort((a, b) => {
if (a.database === 'admin') {
return -1; // try to make all "admin" at the start of the array
}
                    return a.database === b.database ? 0 : 1; // "0" to sort all databases that are the same next to each other, and "1" for pushing it back
});
if (this.auth.extraUsers.length > 0) {
this.debug(`createAuth: Creating "${this.auth.extraUsers.length}" Custom Users`);
this.auth.extraUsers.sort((a, b) => {
if (a.database === 'admin') {
return -1; // try to make all "admin" at the start of the array
}
                        return a.database === b.database ? 0 : 1; // "0" to sort all databases that are the same next to each other, and "1" for pushing it back
// reconnecting the database because the root user now exists and the "localhost exception" only allows the first user
await con.close();
con = await mongodb_1.MongoClient.connect(this.getUri('admin'), this._instanceInfo.instance.extraConnectionOptions ?? {});
db = con.db('admin');
for (const user of this.auth.extraUsers) {
user.database = (0, utils_1.isNullOrUndefined)(user.database) ? 'admin' : user.database;
                // just to avoid calling "con.db" every time in the loop if it is the same database
if (user.database !== db.databaseName) {
db = con.db(user.database);
}
this.debug('createAuth: Creating User: ', user);
await db.command({
createUser: user.createUser,
pwd: user.pwd,
customData: {
...user.customData,
createdBy: 'mongodb-memory-server',
as: 'EXTRAUSER',
},
roles: user.roles,
authenticationRestrictions: user.authenticationRestrictions ?? [],
mechanisms: user.mechanisms ?? ['SCRAM-SHA-256'],
digestPassword: user.digestPassword ?? true,
// "writeConcern" is needced, otherwise replset servers might fail with "auth failed: such user does not exist"
writeConcern: {
w: 'majority',
},
});
// reconnecting the database because the root user now exists and the "localhost exception" only allows the first user
yield con.close();
con = yield mongodb_1.MongoClient.connect(this.getUri('admin'), (_a = this._instanceInfo.instance.extraConnectionOptions) !== null && _a !== void 0 ? _a : {});
db = con.db('admin');
for (const user of this.auth.extraUsers) {
user.database = (0, utils_1.isNullOrUndefined)(user.database) ? 'admin' : user.database;
                    // just to avoid calling "con.db" every time in the loop if it is the same database
if (user.database !== db.databaseName) {
db = con.db(user.database);
}
this.debug('createAuth: Creating User: ', user);
yield db.command({
createUser: user.createUser,
pwd: user.pwd,
customData: Object.assign(Object.assign({}, user.customData), { createdBy: 'mongodb-memory-server', as: 'EXTRAUSER' }),
roles: user.roles,
authenticationRestrictions: (_b = user.authenticationRestrictions) !== null && _b !== void 0 ? _b : [],
mechanisms: (_c = user.mechanisms) !== null && _c !== void 0 ? _c : ['SCRAM-SHA-256'],
digestPassword: (_d = user.digestPassword) !== null && _d !== void 0 ? _d : true,
// "writeConcern" is needced, otherwise replset servers might fail with "auth failed: such user does not exist"
writeConcern: {
w: 'majority',
},
});
}
}
}
finally {
                // close the connection in any case (whether an error was thrown or it was successful)
yield con.close();
}
});
}
finally {
// close connection in any case (even if throwing a error or being successfull)
await con.close();
}
}
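// Editor's note: a hedged sketch of the options "createAuth" consumes, using the 9.x
// "AutomaticAuth" shape visible in this diff ("enable" plus optional root and extra users);
// all concrete names and passwords below are illustrative assumptions, not package defaults.
import { MongoMemoryServer } from 'mongodb-memory-server-core';

async function exampleAuthUsage(): Promise<void> {
  const mongod = await MongoMemoryServer.create({
    auth: {
      enable: true,
      customRootName: 'root',    // used for the initial "createUser" command on "admin"
      customRootPwd: 'rootpass',
      extraUsers: [
        {
          createUser: 'appUser', // created after reconnecting as the root user
          pwd: 'appPass',
          database: 'app',       // defaults to "admin" when omitted
          roles: [{ role: 'readWrite', db: 'app' }],
        },
      ],
    },
  });
  console.log(mongod.getUri('app'));
  await mongod.stop();
}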

@@ -496,5 +486,5 @@ /**

}
return typeof this.auth.disable === 'boolean' // if "this._replSetOpts.auth.disable" is defined, use that
? !this.auth.disable // invert the disable boolean, because "auth" should only be disabled if "disabled = true"
: true; // if "this._replSetOpts.auth.disable" is not defined, default to true because "this._replSetOpts.auth" is defined
return typeof this.auth.enable === 'boolean' // if "this._replSetOpts.auth.enable" is defined, use that
? this.auth.enable
: false; // if "this._replSetOpts.auth.enable" is not defined, default to false
}

@@ -501,0 +491,0 @@ }

@@ -27,3 +27,3 @@ import { AnyOS } from './getos';

/** Path for "~/.config/" (user home) */
legacyHomeCache: string;
homeCache: string;
/** Path for "PROJECT/node_modules/.cache/" (project local cache) */

@@ -30,0 +30,0 @@ modulesCache: string;

@@ -5,8 +5,8 @@ "use strict";

const tslib_1 = require("tslib");
const debug_1 = (0, tslib_1.__importDefault)(require("debug"));
const debug_1 = tslib_1.__importDefault(require("debug"));
const resolveConfig_1 = require("./resolveConfig");
const utils_1 = require("./utils");
const path = (0, tslib_1.__importStar)(require("path"));
const path = tslib_1.__importStar(require("path"));
const os_1 = require("os");
const find_cache_dir_1 = (0, tslib_1.__importDefault)(require("find-cache-dir"));
const find_cache_dir_1 = tslib_1.__importDefault(require("find-cache-dir"));
const getos_1 = require("./getos");

@@ -24,29 +24,27 @@ const errors_1 = require("./errors");

*/
static locateBinary(opts) {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
log(`locateBinary: Trying to locate Binary for version "${opts.version}"`);
const useOpts = yield this.generateOptions(opts);
if (!!useOpts.systemBinary) {
log(`locateBinary: env "SYSTEM_BINARY" was provided with value: "${useOpts.systemBinary}"`);
const systemReturn = yield this.getSystemPath(useOpts.systemBinary);
if ((0, utils_1.isNullOrUndefined)(systemReturn)) {
throw new errors_1.NoSystemBinaryFoundError(useOpts.systemBinary);
}
return systemReturn;
static async locateBinary(opts) {
log(`locateBinary: Trying to locate Binary for version "${opts.version}"`);
const useOpts = await this.generateOptions(opts);
if (!!useOpts.systemBinary) {
log(`locateBinary: env "SYSTEM_BINARY" was provided with value: "${useOpts.systemBinary}"`);
const systemReturn = await this.getSystemPath(useOpts.systemBinary);
if ((0, utils_1.isNullOrUndefined)(systemReturn)) {
throw new errors_1.BinaryNotFoundError(useOpts.systemBinary, ' (systemBinary)');
}
if (this.binaryCache.has(opts.version)) {
const binary = this.binaryCache.get(opts.version);
log(`locateBinary: Requested Version found in cache: "[${opts.version}, ${binary}]"`);
return binary;
}
log('locateBinary: running generateDownloadPath');
const returnValue = yield this.generateDownloadPath(useOpts);
if (!returnValue[0]) {
                log('locateBinary: could not find an existing binary');
return undefined;
}
log(`locateBinary: found binary at "${returnValue[1]}"`);
this.binaryCache.set(opts.version, returnValue[1]);
return returnValue[1];
});
return systemReturn;
}
if (this.binaryCache.has(opts.version)) {
const binary = this.binaryCache.get(opts.version);
log(`locateBinary: Requested Version found in cache: "[${opts.version}, ${binary}]"`);
return binary;
}
log('locateBinary: running generateDownloadPath');
const returnValue = await this.generateDownloadPath(useOpts);
if (!returnValue[0]) {
            log('locateBinary: could not find an existing binary');
return undefined;
}
log(`locateBinary: found binary at "${returnValue[1]}"`);
this.binaryCache.set(opts.version, returnValue[1]);
return returnValue[1];
}
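// Editor's note: a hedged sketch of resolving a binary via the (internal) DryMongoBinary,
// illustrating the SYSTEM_BINARY change above: in 9.x a missing system binary throws
// BinaryNotFoundError with an " (systemBinary)" hint instead of the removed
// NoSystemBinaryFoundError. The "MONGOMS_SYSTEM_BINARY" env name and the import path are
// assumptions based on the package's usual conventions.
import { DryMongoBinary } from 'mongodb-memory-server-core/lib/util/DryMongoBinary';

async function exampleLocate(): Promise<void> {
  process.env.MONGOMS_SYSTEM_BINARY = '/usr/local/bin/mongod'; // assumed env variable name
  // resolves to the system binary if it exists and is executable, otherwise throws
  const binary = await DryMongoBinary.locateBinary({ version: '6.0.4' });
  console.log('using binary:', binary);
}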

@@ -56,31 +54,28 @@ /**

*/
static generateOptions(opts) {
var _a, _b;
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
log('generateOptions');
const defaultVersion = (_a = (0, resolveConfig_1.resolveConfig)(resolveConfig_1.ResolveConfigVariables.VERSION)) !== null && _a !== void 0 ? _a : resolveConfig_1.DEFAULT_VERSION;
const ensuredOpts = (0, utils_1.isNullOrUndefined)(opts)
? { version: defaultVersion }
: opts;
const final = {
version: ensuredOpts.version || defaultVersion,
downloadDir: (0, resolveConfig_1.resolveConfig)(resolveConfig_1.ResolveConfigVariables.DOWNLOAD_DIR) || ensuredOpts.downloadDir || '',
os: (_b = ensuredOpts.os) !== null && _b !== void 0 ? _b : (yield (0, getos_1.getOS)()),
platform: ensuredOpts.platform || (0, os_1.platform)(),
arch: ensuredOpts.arch || (0, os_1.arch)(),
systemBinary: (0, resolveConfig_1.resolveConfig)(resolveConfig_1.ResolveConfigVariables.SYSTEM_BINARY) || ensuredOpts.systemBinary || '',
};
final.downloadDir = path.dirname((yield this.generateDownloadPath(final))[1]);
// if truthy
if ((0, resolveConfig_1.resolveConfig)(resolveConfig_1.ResolveConfigVariables.ARCHIVE_NAME) ||
(0, resolveConfig_1.resolveConfig)(resolveConfig_1.ResolveConfigVariables.DOWNLOAD_URL)) {
// "DOWNLOAD_URL" will be used over "ARCHIVE_NAME"
// the "as string" cast is there because it is already checked that one of the 2 exists, and "resolveConfig" ensures it only returns strings
const input = ((0, resolveConfig_1.resolveConfig)(resolveConfig_1.ResolveConfigVariables.DOWNLOAD_URL) ||
(0, resolveConfig_1.resolveConfig)(resolveConfig_1.ResolveConfigVariables.ARCHIVE_NAME));
log(`generateOptions: ARCHIVE_NAME or DOWNLOAD_URL defined, generating options based on that (input: "${input}")`);
return this.parseArchiveNameRegex(input, final);
}
return final;
});
static async generateOptions(opts) {
log('generateOptions');
const defaultVersion = (0, resolveConfig_1.resolveConfig)(resolveConfig_1.ResolveConfigVariables.VERSION) ?? resolveConfig_1.DEFAULT_VERSION;
const ensuredOpts = (0, utils_1.isNullOrUndefined)(opts)
? { version: defaultVersion }
: opts;
const final = {
version: ensuredOpts.version || defaultVersion,
downloadDir: (0, resolveConfig_1.resolveConfig)(resolveConfig_1.ResolveConfigVariables.DOWNLOAD_DIR) || ensuredOpts.downloadDir || '',
os: ensuredOpts.os ?? (await (0, getos_1.getOS)()),
platform: ensuredOpts.platform || (0, os_1.platform)(),
arch: ensuredOpts.arch || (0, os_1.arch)(),
systemBinary: (0, resolveConfig_1.resolveConfig)(resolveConfig_1.ResolveConfigVariables.SYSTEM_BINARY) || ensuredOpts.systemBinary || '',
};
final.downloadDir = path.dirname((await this.generateDownloadPath(final))[1]);
// if truthy
if ((0, resolveConfig_1.resolveConfig)(resolveConfig_1.ResolveConfigVariables.ARCHIVE_NAME) ||
(0, resolveConfig_1.resolveConfig)(resolveConfig_1.ResolveConfigVariables.DOWNLOAD_URL)) {
// "DOWNLOAD_URL" will be used over "ARCHIVE_NAME"
// the "as string" cast is there because it is already checked that one of the 2 exists, and "resolveConfig" ensures it only returns strings
const input = ((0, resolveConfig_1.resolveConfig)(resolveConfig_1.ResolveConfigVariables.DOWNLOAD_URL) ||
(0, resolveConfig_1.resolveConfig)(resolveConfig_1.ResolveConfigVariables.ARCHIVE_NAME));
log(`generateOptions: ARCHIVE_NAME or DOWNLOAD_URL defined, generating options based on that (input: "${input}")`);
return this.parseArchiveNameRegex(input, final);
}
return final;
}
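// Editor's note: a short, hedged illustration of the precedence implemented above:
// DOWNLOAD_URL wins over ARCHIVE_NAME, and both override the normal platform/arch detection.
// The "MONGOMS_*" env names and the example URL/archive are assumptions for illustration.
process.env.MONGOMS_DOWNLOAD_URL =
  'https://fastdl.mongodb.org/linux/mongodb-linux-x86_64-ubuntu2204-6.0.4.tgz'; // assumed example URL
process.env.MONGOMS_ARCHIVE_NAME = 'mongodb-linux-x86_64-ubuntu2204-6.0.4.tgz'; // ignored while DOWNLOAD_URL is set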

@@ -110,3 +105,3 @@ /**

os: 'linux',
dist: typeof (distMatches === null || distMatches === void 0 ? void 0 : distMatches[1]) === 'string' ? distMatches[1] : 'unknown',
dist: typeof distMatches?.[1] === 'string' ? distMatches[1] : 'unknown',
// "release" should be able to be discarded in this case

@@ -125,17 +120,15 @@ release: '',

*/
static getBinaryName(opts) {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
log('getBinaryName');
let binaryName;
if ((0, resolveConfig_1.envToBool)((0, resolveConfig_1.resolveConfig)(resolveConfig_1.ResolveConfigVariables.USE_ARCHIVE_NAME_FOR_BINARY_NAME))) {
const archiveName = yield new MongoBinaryDownloadUrl_1.MongoBinaryDownloadUrl(opts).getArchiveName();
binaryName = path.parse(archiveName).name;
}
else {
const addExe = opts.platform === 'win32' ? '.exe' : '';
const dist = (0, getos_1.isLinuxOS)(opts.os) ? opts.os.dist : opts.os.os;
binaryName = `mongod-${opts.arch}-${dist}-${opts.version}${addExe}`;
}
return binaryName;
});
static async getBinaryName(opts) {
log('getBinaryName');
let binaryName;
if ((0, resolveConfig_1.envToBool)((0, resolveConfig_1.resolveConfig)(resolveConfig_1.ResolveConfigVariables.USE_ARCHIVE_NAME_FOR_BINARY_NAME))) {
const archiveName = await new MongoBinaryDownloadUrl_1.MongoBinaryDownloadUrl(opts).getArchiveName();
binaryName = path.parse(archiveName).name;
}
else {
const addExe = opts.platform === 'win32' ? '.exe' : '';
const dist = (0, getos_1.isLinuxOS)(opts.os) ? opts.os.dist : opts.os.os;
binaryName = `mongod-${opts.arch}-${dist}-${opts.version}${addExe}`;
}
return binaryName;
}
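// Editor's note: illustrative only - per the template literal above, the default
// (non-archive-name) binary name follows "mongod-<arch>-<dist|os>-<version>[.exe]".
// A small standalone recomputation with assumed values:
const nameOpts = { platform: 'linux' as string, arch: 'x64', version: '6.0.4', dist: 'ubuntu' };
const exeSuffix = nameOpts.platform === 'win32' ? '.exe' : '';
const exampleBinaryName = `mongod-${nameOpts.arch}-${nameOpts.dist}-${nameOpts.version}${exeSuffix}`;
// => "mongod-x64-ubuntu-6.0.4" (or e.g. "mongod-x64-win32-6.0.4.exe" on windows)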

@@ -154,16 +147,13 @@ /**

*/
static getSystemPath(systemBinary) {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
// REFACTOR: change this function to always return "string"
log('getSystempath');
try {
yield (0, utils_1.checkBinaryPermissions)(systemBinary);
log(`getSystemPath: found system binary path at "${systemBinary}"`);
return systemBinary; // returns if "access" is successful
}
catch (err) {
log(`getSystemPath: can't find system binary at "${systemBinary}".\n${err instanceof Error ? err.message : err}`);
}
return undefined;
});
static async getSystemPath(systemBinary) {
log('getSystempath');
try {
await (0, utils_1.checkBinaryPermissions)(systemBinary);
log(`getSystemPath: found system binary path at "${systemBinary}"`);
return systemBinary; // returns if "access" is successful
}
catch (err) {
log(`getSystemPath: can't find system binary at "${systemBinary}".\n${err instanceof Error ? err.message : err}`);
}
return undefined;
}

@@ -176,39 +166,37 @@ /**

*/
static generatePaths(opts) {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
log('generatePaths', opts);
const final = {
legacyHomeCache: '',
modulesCache: '',
relative: '',
resolveConfig: '',
};
const binaryName = yield this.getBinaryName(opts);
// Assign "node_modules/.cache" to modulesCache
// if we're in postinstall script, npm will set the cwd too deep
// when in postinstall, npm will provide an "INIT_CWD" env variable
let nodeModulesDLDir = process.env['INIT_CWD'] || process.cwd();
// as long as "node_modules/mongodb-memory-server*" is included in the path, go the paths up
while (nodeModulesDLDir.includes(`node_modules${path.sep}mongodb-memory-server`)) {
nodeModulesDLDir = path.resolve(nodeModulesDLDir, '..', '..');
}
const tmpModulesCache = (0, find_cache_dir_1.default)({
name: 'mongodb-memory-server',
cwd: nodeModulesDLDir,
});
if (!(0, utils_1.isNullOrUndefined)(tmpModulesCache)) {
final.modulesCache = this.combineBinaryName(path.resolve(tmpModulesCache), binaryName);
}
const legacyHomeCache = path.resolve(this.homedir(), '.cache/mongodb-binaries');
final.legacyHomeCache = this.combineBinaryName(legacyHomeCache, binaryName);
// Resolve the config value "DOWNLOAD_DIR" if provided, otherwise remove from list
const resolveConfigValue = opts.downloadDir || (0, resolveConfig_1.resolveConfig)(resolveConfig_1.ResolveConfigVariables.DOWNLOAD_DIR);
if (!(0, utils_1.isNullOrUndefined)(resolveConfigValue) && resolveConfigValue.length > 0) {
log(`generatePaths: resolveConfigValue is not empty`);
final.resolveConfig = this.combineBinaryName(resolveConfigValue, binaryName);
}
// Resolve relative to cwd if no other has been found
final.relative = this.combineBinaryName(path.resolve(process.cwd(), 'mongodb-binaries'), binaryName);
return final;
static async generatePaths(opts) {
log('generatePaths', opts);
const final = {
homeCache: '',
modulesCache: '',
relative: '',
resolveConfig: '',
};
const binaryName = await this.getBinaryName(opts);
// Assign "node_modules/.cache" to modulesCache
// if we're in postinstall script, npm will set the cwd too deep
// when in postinstall, npm will provide an "INIT_CWD" env variable
let nodeModulesDLDir = process.env['INIT_CWD'] || process.cwd();
// as long as "node_modules/mongodb-memory-server*" is included in the path, go the paths up
while (nodeModulesDLDir.includes(`node_modules${path.sep}mongodb-memory-server`)) {
nodeModulesDLDir = path.resolve(nodeModulesDLDir, '..', '..');
}
const tmpModulesCache = (0, find_cache_dir_1.default)({
name: 'mongodb-memory-server',
cwd: nodeModulesDLDir,
});
if (!(0, utils_1.isNullOrUndefined)(tmpModulesCache)) {
final.modulesCache = this.combineBinaryName(path.resolve(tmpModulesCache), binaryName);
}
const homeCache = path.resolve(this.homedir(), '.cache/mongodb-binaries');
final.homeCache = this.combineBinaryName(homeCache, binaryName);
// Resolve the config value "DOWNLOAD_DIR" if provided, otherwise remove from list
const resolveConfigValue = opts.downloadDir || (0, resolveConfig_1.resolveConfig)(resolveConfig_1.ResolveConfigVariables.DOWNLOAD_DIR);
if (!(0, utils_1.isNullOrUndefined)(resolveConfigValue) && resolveConfigValue.length > 0) {
log(`generatePaths: resolveConfigValue is not empty`);
final.resolveConfig = this.combineBinaryName(resolveConfigValue, binaryName);
}
// Resolve relative to cwd if no other has been found
final.relative = this.combineBinaryName(path.resolve(process.cwd(), 'mongodb-binaries'), binaryName);
return final;
}
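// Editor's note: an assumed/illustrative result of "generatePaths" on a linux project, mainly
// to show the 9.x rename of "legacyHomeCache" to "homeCache"; concrete paths are placeholders.
const examplePaths = {
  resolveConfig: '',                                                       // empty unless DOWNLOAD_DIR is set
  homeCache: '/home/user/.cache/mongodb-binaries/mongod-x64-ubuntu-6.0.4', // "~/.cache/mongodb-binaries"
  modulesCache: '/project/node_modules/.cache/mongodb-memory-server/mongod-x64-ubuntu-6.0.4',
  relative: '/project/mongodb-binaries/mongod-x64-ubuntu-6.0.4',
};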

@@ -219,50 +207,48 @@ /**

*/
static generateDownloadPath(opts) {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
const preferGlobal = (0, resolveConfig_1.envToBool)((0, resolveConfig_1.resolveConfig)(resolveConfig_1.ResolveConfigVariables.PREFER_GLOBAL_PATH));
log(`generateDownloadPath: Generating Download Path, preferGlobal: "${preferGlobal}"`);
const paths = yield this.generatePaths(opts);
log('generateDownloadPath: Paths:', paths, opts.systemBinary);
            // SystemBinary will only be returned if it is defined and the path exists
if (!!opts.systemBinary && (yield (0, utils_1.pathExists)(opts.systemBinary))) {
const sysPath = yield this.getSystemPath(opts.systemBinary);
if (!(0, utils_1.isNullOrUndefined)(sysPath)) {
return [true, sysPath];
}
static async generateDownloadPath(opts) {
const preferGlobal = (0, resolveConfig_1.envToBool)((0, resolveConfig_1.resolveConfig)(resolveConfig_1.ResolveConfigVariables.PREFER_GLOBAL_PATH));
log(`generateDownloadPath: Generating Download Path, preferGlobal: "${preferGlobal}"`);
const paths = await this.generatePaths(opts);
log('generateDownloadPath: Paths:', paths, opts.systemBinary);
        // SystemBinary will only be returned if it is defined and the path exists
if (!!opts.systemBinary && (await (0, utils_1.pathExists)(opts.systemBinary))) {
const sysPath = await this.getSystemPath(opts.systemBinary);
if (!(0, utils_1.isNullOrUndefined)(sysPath)) {
return [true, sysPath];
}
// Section where paths are probed for an existing binary
if (yield (0, utils_1.pathExists)(paths.resolveConfig)) {
log(`generateDownloadPath: Found binary in resolveConfig (DOWNLOAD_DIR): "${paths.resolveConfig}"`);
return [true, paths.resolveConfig];
}
if (yield (0, utils_1.pathExists)(paths.legacyHomeCache)) {
log(`generateDownloadPath: Found binary in legacyHomeCache: "${paths.legacyHomeCache}"`);
return [true, paths.legacyHomeCache];
}
if (yield (0, utils_1.pathExists)(paths.modulesCache)) {
log(`generateDownloadPath: Found binary in modulesCache: "${paths.modulesCache}"`);
return [true, paths.modulesCache];
}
if (yield (0, utils_1.pathExists)(paths.relative)) {
log(`generateDownloadPath: Found binary in relative: "${paths.relative}"`);
return [true, paths.relative];
}
// Section where binary path gets generated when no binary was found
log(`generateDownloadPath: no existing binary for version "${opts.version}" was found`);
if (paths.resolveConfig.length > 0) {
log(`generateDownloadPath: using resolveConfig (DOWNLOAD_DIR) "${paths.resolveConfig}"`);
return [false, paths.resolveConfig];
}
if (preferGlobal && !!paths.legacyHomeCache) {
log(`generateDownloadPath: using global (preferGlobal) "${paths.legacyHomeCache}"`);
return [false, paths.legacyHomeCache];
}
// this case may not happen, if somehow the cwd gets changed outside of "node_modules" reach
if (paths.modulesCache.length > 0) {
log(`generateDownloadPath: using modulesCache "${paths.modulesCache}"`);
return [false, paths.modulesCache];
}
log(`generateDownloadPath: using relative "${paths.relative}"`);
return [false, paths.relative];
});
}
// Section where paths are probed for an existing binary
if (await (0, utils_1.pathExists)(paths.resolveConfig)) {
log(`generateDownloadPath: Found binary in resolveConfig (DOWNLOAD_DIR): "${paths.resolveConfig}"`);
return [true, paths.resolveConfig];
}
if (await (0, utils_1.pathExists)(paths.homeCache)) {
log(`generateDownloadPath: Found binary in homeCache: "${paths.homeCache}"`);
return [true, paths.homeCache];
}
if (await (0, utils_1.pathExists)(paths.modulesCache)) {
log(`generateDownloadPath: Found binary in modulesCache: "${paths.modulesCache}"`);
return [true, paths.modulesCache];
}
if (await (0, utils_1.pathExists)(paths.relative)) {
log(`generateDownloadPath: Found binary in relative: "${paths.relative}"`);
return [true, paths.relative];
}
// Section where binary path gets generated when no binary was found
log(`generateDownloadPath: no existing binary for version "${opts.version}" was found`);
if (paths.resolveConfig.length > 0) {
log(`generateDownloadPath: using resolveConfig (DOWNLOAD_DIR) "${paths.resolveConfig}"`);
return [false, paths.resolveConfig];
}
if (preferGlobal && !!paths.homeCache) {
log(`generateDownloadPath: using global (preferGlobal) "${paths.homeCache}"`);
return [false, paths.homeCache];
}
// this case may not happen, if somehow the cwd gets changed outside of "node_modules" reach
if (paths.modulesCache.length > 0) {
log(`generateDownloadPath: using modulesCache "${paths.modulesCache}"`);
return [false, paths.modulesCache];
}
log(`generateDownloadPath: using relative "${paths.relative}"`);
return [false, paths.relative];
}
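// Editor's note: a sketch summarizing the probe order implemented above. Existing binaries are
// checked as systemBinary -> resolveConfig (DOWNLOAD_DIR) -> homeCache -> modulesCache ->
// relative; when nothing exists, the download target is resolveConfig if set, then homeCache
// when PREFER_GLOBAL_PATH is truthy, then modulesCache, then relative.
// The "MONGOMS_PREFER_GLOBAL_PATH" env name is an assumption based on the usual prefix.
process.env.MONGOMS_PREFER_GLOBAL_PATH = 'true'; // prefer "~/.cache/mongodb-binaries" for new downloads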

@@ -277,3 +263,2 @@ /**

}
exports.DryMongoBinary = DryMongoBinary;
/**

@@ -283,2 +268,3 @@ * Binaries already found, values are: [Version, Path]

DryMongoBinary.binaryCache = new Map();
exports.DryMongoBinary = DryMongoBinary;
//# sourceMappingURL=DryMongoBinary.js.map

@@ -29,10 +29,2 @@ export declare class StateError extends Error {

}
export declare class EnsureInstanceError extends Error {
isRunning: boolean;
constructor(isRunning: boolean);
}
export declare class NoSystemBinaryFoundError extends Error {
binaryPath: string;
constructor(binaryPath: string);
}
export declare class Md5CheckFailedError extends Error {

@@ -63,3 +55,4 @@ binarymd5: string;

path: string;
constructor(path: string);
extra: string;
constructor(path: string, extra?: string);
}

@@ -119,4 +112,12 @@ /**

}
/**
* Error for when the linux distro is unknown
*/
export declare class UnknownLinuxDistro extends Error {
distro: string;
id_like: string[];
constructor(distro: string, id_like: string[]);
}
export declare class GenericMMSError extends Error {
}
//# sourceMappingURL=errors.d.ts.map
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.GenericMMSError = exports.DownloadError = exports.UnknownVersionError = exports.UnexpectedCloseError = exports.StdoutInstanceError = exports.KnownVersionIncompatibilityError = exports.NoRegexMatchError = exports.ParseArchiveRegexError = exports.ReplsetCountLowError = exports.AssertionFallbackError = exports.BinaryNotFoundError = exports.InsufficientPermissionsError = exports.AuthNotObjectError = exports.KeyFileMissingError = exports.InstanceInfoError = exports.StartBinaryFailedError = exports.Md5CheckFailedError = exports.NoSystemBinaryFoundError = exports.EnsureInstanceError = exports.WaitForPrimaryTimeoutError = exports.UnknownArchitectureError = exports.UnknownPlatformError = exports.UnableToUnlockLockfileError = exports.UnknownLockfileStatusError = exports.StateError = void 0;
exports.GenericMMSError = exports.UnknownLinuxDistro = exports.DownloadError = exports.UnknownVersionError = exports.UnexpectedCloseError = exports.StdoutInstanceError = exports.KnownVersionIncompatibilityError = exports.NoRegexMatchError = exports.ParseArchiveRegexError = exports.ReplsetCountLowError = exports.AssertionFallbackError = exports.BinaryNotFoundError = exports.InsufficientPermissionsError = exports.AuthNotObjectError = exports.KeyFileMissingError = exports.InstanceInfoError = exports.StartBinaryFailedError = exports.Md5CheckFailedError = exports.WaitForPrimaryTimeoutError = exports.UnknownArchitectureError = exports.UnknownPlatformError = exports.UnableToUnlockLockfileError = exports.UnknownLockfileStatusError = exports.StateError = void 0;
const utils_1 = require("./utils");

@@ -59,25 +59,2 @@ class StateError extends Error {

exports.WaitForPrimaryTimeoutError = WaitForPrimaryTimeoutError;
// REFACTOR: consider merging this with InstanceInfoError
class EnsureInstanceError extends Error {
constructor(isRunning) {
super();
this.isRunning = isRunning;
const baseMesasge = '"ensureInstance" failed, because';
if (isRunning) {
this.message = `${baseMesasge} state was "running" but "instanceInfo" was undefined!`;
}
else {
this.message = `${baseMesasge} "instanceInfo" was undefined after running "start"`;
}
}
}
exports.EnsureInstanceError = EnsureInstanceError;
// REFACTOR: merge this error with BinaryNotFoundError
class NoSystemBinaryFoundError extends Error {
constructor(binaryPath) {
super(`Config option "SYSTEM_BINARY" was provided with value "${binaryPath}", but no binary could be found!`);
this.binaryPath = binaryPath;
}
}
exports.NoSystemBinaryFoundError = NoSystemBinaryFoundError;
class Md5CheckFailedError extends Error {

@@ -125,5 +102,6 @@ constructor(binarymd5, checkfilemd5) {

class BinaryNotFoundError extends Error {
constructor(path) {
super(`No Binary at path "${path}" was found! (ENOENT)`);
constructor(path, extra = '') {
super(`No Binary at path "${path}" was found! (ENOENT)${extra}`);
this.path = path;
this.extra = extra;
}

@@ -198,3 +176,3 @@ }

}
if (process.platform === 'win32' && (code !== null && code !== void 0 ? code : 0) > 1000000000) {
if (process.platform === 'win32' && (code ?? 0) > 1000000000) {
this.message +=

@@ -227,2 +205,13 @@ '\nExit Code is large, commonly meaning that vc_redist is not installed, the latest vc_redist can be found at https://learn.microsoft.com/en-us/cpp/windows/latest-supported-vc-redist?view=msvc-170';

exports.DownloadError = DownloadError;
/**
* Error for when the linux distro is unknown
*/
class UnknownLinuxDistro extends Error {
constructor(distro, id_like) {
super(`Unknown/unsupported linux "${distro}" id_like's: [${id_like?.join(', ')}]`);
this.distro = distro;
this.id_like = id_like;
}
}
exports.UnknownLinuxDistro = UnknownLinuxDistro;
/* Custom Generic Error class for MMS */
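// Editor's note: a hedged consumer-side sketch of distinguishing the reworked 9.x errors
// (the import path is an assumption; the properties used below are the ones declared in
// errors.d.ts earlier in this diff).
import { BinaryNotFoundError, UnknownLinuxDistro } from 'mongodb-memory-server-core/lib/util/errors';

function reportStartupError(err: unknown): void {
  if (err instanceof BinaryNotFoundError) {
    // "extra" is " (systemBinary)" when the path came from the SYSTEM_BINARY config
    console.error(`no mongod binary at "${err.path}"${err.extra}`);
  } else if (err instanceof UnknownLinuxDistro) {
    console.error(`unsupported linux distro "${err.distro}" (id_like: ${err.id_like.join(', ')})`);
  } else {
    throw err;
  }
}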

@@ -229,0 +218,0 @@ class GenericMMSError extends Error {

@@ -13,3 +13,3 @@ /** Helper Static so that a consistent UNKNOWN value is used */

}
export declare type AnyOS = OtherOS | LinuxOS;
export type AnyOS = OtherOS | LinuxOS;
/**

@@ -16,0 +16,0 @@ * Check if the OS is a LinuxOS Typeguard

@@ -6,3 +6,3 @@ "use strict";

const os_1 = require("os");
const debug_1 = (0, tslib_1.__importDefault)(require("debug"));
const debug_1 = tslib_1.__importDefault(require("debug"));
const utils_1 = require("../utils");

@@ -39,55 +39,51 @@ const log = (0, debug_1.default)('MongoMS:getos');

/** Get an OS object */
function getOS() {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
if (!cachedOs) {
/** Node builtin function for first determinations */
const osName = (0, os_1.platform)();
// Linux is a special case.
if (osName === 'linux') {
cachedOs = yield getLinuxInformation();
}
else {
cachedOs = { os: osName };
}
async function getOS() {
if (!cachedOs) {
/** Node builtin function for first determinations */
const osName = (0, os_1.platform)();
// Linux is a special case.
if (osName === 'linux') {
cachedOs = await getLinuxInformation();
}
return cachedOs;
});
else {
cachedOs = { os: osName };
}
}
return cachedOs;
}
exports.getOS = getOS;
/** Function to outsource Linux Information Parsing */
function getLinuxInformation() {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
// Structure of this function:
// 1. get upstream release, if possible
// 2. get os release (etc) because it has an "id_like"
// 3. get os release (usr) because it has an "id_like"
// 4. get lsb-release (etc) as fallback
const upstreamLSB = yield (0, utils_1.tryReleaseFile)('/etc/upstream-release/lsb-release', parseLSB);
if (isValidOs(upstreamLSB)) {
log('getLinuxInformation: Using UpstreamLSB');
return upstreamLSB;
}
const etcOsRelease = yield (0, utils_1.tryReleaseFile)('/etc/os-release', parseOS);
if (isValidOs(etcOsRelease)) {
log('getLinuxInformation: Using etcOsRelease');
return etcOsRelease;
}
const usrOsRelease = yield (0, utils_1.tryReleaseFile)('/usr/lib/os-release', parseOS);
if (isValidOs(usrOsRelease)) {
log('getLinuxInformation: Using usrOsRelease');
return usrOsRelease;
}
const etcLSBRelease = yield (0, utils_1.tryReleaseFile)('/etc/lsb-release', parseLSB);
if (isValidOs(etcLSBRelease)) {
log('getLinuxInformation: Using etcLSBRelease');
return etcLSBRelease;
}
console.warn('Could not find any valid Release File, using fallback information');
// if none has worked, return unknown
return {
os: 'linux',
dist: exports.UNKNOWN,
release: '',
};
});
async function getLinuxInformation() {
// Structure of this function:
// 1. get upstream release, if possible
// 2. get os release (etc) because it has an "id_like"
// 3. get os release (usr) because it has an "id_like"
// 4. get lsb-release (etc) as fallback
const upstreamLSB = await (0, utils_1.tryReleaseFile)('/etc/upstream-release/lsb-release', parseLSB);
if (isValidOs(upstreamLSB)) {
log('getLinuxInformation: Using UpstreamLSB');
return upstreamLSB;
}
const etcOsRelease = await (0, utils_1.tryReleaseFile)('/etc/os-release', parseOS);
if (isValidOs(etcOsRelease)) {
log('getLinuxInformation: Using etcOsRelease');
return etcOsRelease;
}
const usrOsRelease = await (0, utils_1.tryReleaseFile)('/usr/lib/os-release', parseOS);
if (isValidOs(usrOsRelease)) {
log('getLinuxInformation: Using usrOsRelease');
return usrOsRelease;
}
const etcLSBRelease = await (0, utils_1.tryReleaseFile)('/etc/lsb-release', parseLSB);
if (isValidOs(etcLSBRelease)) {
log('getLinuxInformation: Using etcLSBRelease');
return etcLSBRelease;
}
console.warn('Could not find any valid Release File, using fallback information');
// if none has worked, return unknown
return {
os: 'linux',
dist: exports.UNKNOWN,
release: '',
};
}
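// Editor's note: a minimal sketch of calling the exported "getOS" (import path assumed); the
// example return values are illustrative, the shape follows the AnyOS / LinuxOS types in this diff.
import { getOS } from 'mongodb-memory-server-core/lib/util/getos';

async function exampleGetOS(): Promise<void> {
  const osInfo = await getOS();
  // e.g. { os: 'linux', dist: 'ubuntu', release: '22.04', codename: 'jammy', id_like: ['debian'] }
  // or   { os: 'darwin' } on non-linux platforms
  console.log(osInfo);
}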

@@ -111,8 +107,7 @@ /**

function parseLSB(input) {
var _a, _b, _c, _d, _e;
return {
os: 'linux',
dist: (_b = (_a = input.match(LSBRegex.name)) === null || _a === void 0 ? void 0 : _a[1].toLocaleLowerCase()) !== null && _b !== void 0 ? _b : exports.UNKNOWN,
codename: (_c = input.match(LSBRegex.codename)) === null || _c === void 0 ? void 0 : _c[1].toLocaleLowerCase(),
release: (_e = (_d = input.match(LSBRegex.release)) === null || _d === void 0 ? void 0 : _d[1].toLocaleLowerCase()) !== null && _e !== void 0 ? _e : '',
dist: input.match(LSBRegex.name)?.[1].toLocaleLowerCase() ?? exports.UNKNOWN,
codename: input.match(LSBRegex.codename)?.[1].toLocaleLowerCase(),
release: input.match(LSBRegex.release)?.[1].toLocaleLowerCase() ?? '',
};

@@ -125,9 +120,8 @@ }

function parseOS(input) {
var _a, _b, _c, _d, _e, _f;
return {
os: 'linux',
dist: (_b = (_a = input.match(OSRegex.name)) === null || _a === void 0 ? void 0 : _a[1].toLocaleLowerCase()) !== null && _b !== void 0 ? _b : exports.UNKNOWN,
codename: (_c = input.match(OSRegex.codename)) === null || _c === void 0 ? void 0 : _c[1].toLocaleLowerCase(),
release: (_e = (_d = input.match(OSRegex.release)) === null || _d === void 0 ? void 0 : _d[1].toLocaleLowerCase()) !== null && _e !== void 0 ? _e : '',
id_like: (_f = input.match(OSRegex.id_like)) === null || _f === void 0 ? void 0 : _f[1].toLocaleLowerCase().split(' '),
dist: input.match(OSRegex.name)?.[1].toLocaleLowerCase() ?? exports.UNKNOWN,
codename: input.match(OSRegex.codename)?.[1].toLocaleLowerCase(),
release: input.match(OSRegex.release)?.[1].toLocaleLowerCase() ?? '',
id_like: input.match(OSRegex.id_like)?.[1].toLocaleLowerCase().split(' '),
};

@@ -134,0 +128,0 @@ }

@@ -6,8 +6,7 @@ "use strict";

const events_1 = require("events");
const utils = (0, tslib_1.__importStar)(require("./utils"));
const debug_1 = (0, tslib_1.__importDefault)(require("debug"));
const path = (0, tslib_1.__importStar)(require("path"));
const utils = tslib_1.__importStar(require("./utils"));
const debug_1 = tslib_1.__importDefault(require("debug"));
const path = tslib_1.__importStar(require("path"));
const fs_1 = require("fs");
const async_mutex_1 = require("async-mutex");
const uuid_1 = require("uuid");
const errors_1 = require("./errors");

@@ -52,6 +51,2 @@ const log = (0, debug_1.default)('MongoMS:LockFile');

class LockFile {
constructor(file, uuid) {
this.file = file;
this.uuid = uuid;
}
/**

@@ -61,20 +56,18 @@ * Acquire an lockfile

*/
static lock(file) {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
yield utils.ensureAsync();
log(`lock: Locking file "${file}"`);
const useFile = path.resolve(file.trim());
// just to make sure "path" could resolve it to something
utils.assertion(useFile.length > 0, new Error('Provided Path for lock file is length of 0'));
const status = yield this.checkLock(useFile);
switch (status) {
case LockFileStatus.lockedDifferent:
case LockFileStatus.lockedSelf:
return this.waitForLock(useFile);
case LockFileStatus.available:
return this.createLock(useFile);
default:
throw new errors_1.UnknownLockfileStatusError(status);
}
});
static async lock(file) {
await utils.ensureAsync();
log(`lock: Locking file "${file}"`);
const useFile = path.resolve(file.trim());
// just to make sure "path" could resolve it to something
utils.assertion(useFile.length > 0, new Error('Provided Path for lock file is length of 0'));
const status = await this.checkLock(useFile);
switch (status) {
case LockFileStatus.lockedDifferent:
case LockFileStatus.lockedSelf:
return this.waitForLock(useFile);
case LockFileStatus.available:
return this.createLock(useFile);
default:
throw new errors_1.UnknownLockfileStatusError(status);
}
}
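// Editor's note: a minimal sketch of the lock/unlock pattern this class exists for (mirroring
// how "MongoBinary.download" uses it later in this diff); the lockfile path is illustrative and
// the import path is an assumption.
import * as path from 'path';
import { LockFile } from 'mongodb-memory-server-core/lib/util/lockfile';

async function withDownloadLock(downloadDir: string, version: string): Promise<void> {
  const lock = await LockFile.lock(path.resolve(downloadDir, `${version}.lock`));
  try {
    // ... do the work that must not run concurrently (e.g. downloading a binary) ...
  } finally {
    await lock.unlock(); // always release, even when the work throws
  }
}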

@@ -85,38 +78,36 @@ /**

*/
static checkLock(file, uuid) {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
log(`checkLock: for file "${file}" with uuid: "${uuid}"`);
// if file / path does not exist, directly acquire lock
if (!(yield utils.pathExists(file))) {
return LockFileStatus.available;
}
try {
const fileData = (yield fs_1.promises.readFile(file)).toString().trim().split(' ');
const readout = parseInt(fileData[0]);
if (readout === process.pid) {
log(`checkLock: Lock File Already exists, and is for *this* process, with uuid: "${fileData[1]}"`);
// early return if "file"(input) dosnt exists within the files Map anymore
if (!this.files.has(file)) {
return LockFileStatus.available;
}
// check if "uuid"(input) matches the filereadout, if yes say "available" (for unlock check)
if (!utils.isNullOrUndefined(uuid)) {
return uuid === fileData[1]
? LockFileStatus.availableInstance
: LockFileStatus.lockedSelf;
}
// as fallback say "lockedSelf"
return LockFileStatus.lockedSelf;
}
                log(`checkLock: Lock File already exists, for a different process: "${readout}"`);
return utils.isAlive(readout) ? LockFileStatus.lockedDifferent : LockFileStatus.available;
}
catch (err) {
if (utils.errorWithCode(err) && err.code === 'ENOENT') {
log('checkLock: reading file failed with ENOENT');
static async checkLock(file, uuid) {
log(`checkLock: for file "${file}" with uuid: "${uuid}"`);
// if file / path does not exist, directly acquire lock
if (!(await utils.pathExists(file))) {
return LockFileStatus.available;
}
try {
const fileData = (await fs_1.promises.readFile(file)).toString().trim().split(' ');
const readout = parseInt(fileData[0]);
if (readout === process.pid) {
log(`checkLock: Lock File Already exists, and is for *this* process, with uuid: "${fileData[1]}"`);
// early return if "file"(input) dosnt exists within the files Map anymore
if (!this.files.has(file)) {
return LockFileStatus.available;
}
throw err;
// check if "uuid"(input) matches the filereadout, if yes say "available" (for unlock check)
if (!utils.isNullOrUndefined(uuid)) {
return uuid === fileData[1]
? LockFileStatus.availableInstance
: LockFileStatus.lockedSelf;
}
// as fallback say "lockedSelf"
return LockFileStatus.lockedSelf;
}
});
            log(`checkLock: Lock File already exists, for a different process: "${readout}"`);
return utils.isAlive(readout) ? LockFileStatus.lockedDifferent : LockFileStatus.available;
}
catch (err) {
if (utils.errorWithCode(err) && err.code === 'ENOENT') {
log('checkLock: reading file failed with ENOENT');
return LockFileStatus.available;
}
throw err;
}
}

@@ -127,45 +118,43 @@ /**

*/
static waitForLock(file) {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
log(`waitForLock: Starting to wait for file "${file}"`);
/** Store the interval id to be cleared later */
let interval = undefined;
/** Store the function in an value to be cleared later, without having to use an class-external or class function */
let eventCB = undefined;
yield new Promise((res) => {
eventCB = (unlockedFile) => {
if (unlockedFile === file) {
res();
}
};
interval = setInterval(() => (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
const lockStatus = yield this.checkLock(file);
log(`waitForLock: Interval for file "${file}" with status "${lockStatus}"`);
if (lockStatus === LockFileStatus.available) {
res();
}
}), 1000 * 3); // every 3 seconds
this.events.on(LockFileEvents.unlock, eventCB);
});
if (interval) {
clearInterval(interval);
}
if (eventCB) {
this.events.removeListener(LockFileEvents.unlock, eventCB);
}
log(`waitForLock: File became available "${file}"`);
            // the following should prevent race-conditions
yield utils.ensureAsync(); // to make sure all event listeners got executed
const lockStatus = yield this.checkLock(file);
log(`waitForLock: Lock File Status reassessment for file "${file}": ${lockStatus}`);
switch (lockStatus) {
case LockFileStatus.lockedDifferent:
case LockFileStatus.lockedSelf:
return this.waitForLock(file);
case LockFileStatus.available:
return this.createLock(file);
default:
throw new errors_1.UnknownLockfileStatusError(lockStatus);
}
static async waitForLock(file) {
log(`waitForLock: Starting to wait for file "${file}"`);
/** Store the interval id to be cleared later */
let interval = undefined;
/** Store the function in an value to be cleared later, without having to use an class-external or class function */
let eventCB = undefined;
await new Promise((res) => {
eventCB = (unlockedFile) => {
if (unlockedFile === file) {
res();
}
};
interval = setInterval(async () => {
const lockStatus = await this.checkLock(file);
log(`waitForLock: Interval for file "${file}" with status "${lockStatus}"`);
if (lockStatus === LockFileStatus.available) {
res();
}
}, 1000 * 3); // every 3 seconds
this.events.on(LockFileEvents.unlock, eventCB);
});
if (interval) {
clearInterval(interval);
}
if (eventCB) {
this.events.removeListener(LockFileEvents.unlock, eventCB);
}
log(`waitForLock: File became available "${file}"`);
        // the following should prevent race-conditions
await utils.ensureAsync(); // to make sure all event listeners got executed
const lockStatus = await this.checkLock(file);
log(`waitForLock: Lock File Status reassessment for file "${file}": ${lockStatus}`);
switch (lockStatus) {
case LockFileStatus.lockedDifferent:
case LockFileStatus.lockedSelf:
return this.waitForLock(file);
case LockFileStatus.available:
return this.createLock(file);
default:
throw new errors_1.UnknownLockfileStatusError(lockStatus);
}
}

@@ -176,58 +165,57 @@ /**

*/
static createLock(file) {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
// this function only gets called by processed "file" input, so no re-checking
log(`createLock: trying to create a lock file for "${file}"`);
const uuid = (0, uuid_1.v4)();
// This is not an ".catch" because in an callback running "return" dosnt "return" the parent function
try {
yield this.mutex.runExclusive(() => (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
// this may cause "Stack Size" errors, because of an infinite loop if too many times this gets called
if (this.files.has(file)) {
log(`createLock: Map already has file "${file}"`);
throw new RepeatError(true);
}
yield utils.mkdir(path.dirname(file));
yield fs_1.promises.writeFile(file, `${process.pid.toString()} ${uuid}`);
this.files.add(file);
this.events.emit(LockFileEvents.lock, file);
}));
}
catch (err) {
if (err instanceof RepeatError && err.repeat) {
return this.waitForLock(file);
static async createLock(file) {
// this function only gets called by processed "file" input, so no re-checking
log(`createLock: trying to create a lock file for "${file}"`);
const uuid = utils.uuidv4();
// This is not an ".catch" because in an callback running "return" dosnt "return" the parent function
try {
await this.mutex.runExclusive(async () => {
// this may cause "Stack Size" errors, because of an infinite loop if too many times this gets called
if (this.files.has(file)) {
log(`createLock: Map already has file "${file}"`);
throw new RepeatError(true);
}
await utils.mkdir(path.dirname(file));
await fs_1.promises.writeFile(file, `${process.pid.toString()} ${uuid}`);
this.files.add(file);
this.events.emit(LockFileEvents.lock, file);
});
}
catch (err) {
if (err instanceof RepeatError && err.repeat) {
return this.waitForLock(file);
}
log(`createLock: Lock File Created for file "${file}"`);
return new this(file, uuid);
});
}
log(`createLock: Lock File Created for file "${file}"`);
return new this(file, uuid);
}
constructor(file, uuid) {
this.file = file;
this.uuid = uuid;
}
/**
* Unlock the File that is locked by this instance
*/
unlock() {
var _a;
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
yield utils.ensureAsync();
log(`unlock: Unlocking file "${this.file}"`);
if (utils.isNullOrUndefined(this.file) || ((_a = this.file) === null || _a === void 0 ? void 0 : _a.length) <= 0) {
log('unlock: invalid file, returning');
async unlock() {
await utils.ensureAsync();
log(`unlock: Unlocking file "${this.file}"`);
if (utils.isNullOrUndefined(this.file) || this.file?.length <= 0) {
log('unlock: invalid file, returning');
return;
}
// No "case-fallthrough" because this is more clear (and no linter will complain)
switch (await LockFile.checkLock(this.file, this.uuid)) {
case LockFileStatus.available:
log(`unlock: Lock Status was already "available" for file "${this.file}"`);
await this.unlockCleanup(false);
return;
}
// No "case-fallthrough" because this is more clear (and no linter will complain)
switch (yield LockFile.checkLock(this.file, this.uuid)) {
case LockFileStatus.available:
log(`unlock: Lock Status was already "available" for file "${this.file}"`);
yield this.unlockCleanup(false);
return;
case LockFileStatus.availableInstance:
log(`unlock: Lock Status was "availableInstance" for file "${this.file}"`);
yield this.unlockCleanup(true);
return;
case LockFileStatus.lockedSelf:
throw new errors_1.UnableToUnlockLockfileError(true, this.file);
default:
throw new errors_1.UnableToUnlockLockfileError(false, this.file);
}
});
case LockFileStatus.availableInstance:
log(`unlock: Lock Status was "availableInstance" for file "${this.file}"`);
await this.unlockCleanup(true);
return;
case LockFileStatus.lockedSelf:
throw new errors_1.UnableToUnlockLockfileError(true, this.file);
default:
throw new errors_1.UnableToUnlockLockfileError(false, this.file);
}
}

@@ -238,24 +226,21 @@ /**

*/
unlockCleanup(fileio = true) {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
return yield LockFile.mutex.runExclusive(() => (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
log(`unlockCleanup: for file "${this.file}"`);
if (utils.isNullOrUndefined(this.file)) {
return;
}
if (fileio) {
yield fs_1.promises.unlink(this.file).catch((reason) => {
log(`unlockCleanup: lock file unlink failed: "${reason}"`);
});
}
LockFile.files.delete(this.file);
LockFile.events.emit(LockFileEvents.unlock, this.file);
// make this LockFile instance unusable (to prevent double unlock calling)
this.file = undefined;
this.uuid = undefined;
}));
async unlockCleanup(fileio = true) {
return await LockFile.mutex.runExclusive(async () => {
log(`unlockCleanup: for file "${this.file}"`);
if (utils.isNullOrUndefined(this.file)) {
return;
}
if (fileio) {
await fs_1.promises.unlink(this.file).catch((reason) => {
log(`unlockCleanup: lock file unlink failed: "${reason}"`);
});
}
LockFile.files.delete(this.file);
LockFile.events.emit(LockFileEvents.unlock, this.file);
// make this LockFile instance unusable (to prevent double unlock calling)
this.file = undefined;
this.uuid = undefined;
});
}
}
exports.LockFile = LockFile;
/** All Files that are handled by this process */

@@ -267,2 +252,3 @@ LockFile.files = new Set();

LockFile.mutex = new async_mutex_1.Mutex();
exports.LockFile = LockFile;
//# sourceMappingURL=lockfile.js.map

@@ -5,8 +5,8 @@ "use strict";

const tslib_1 = require("tslib");
const os_1 = (0, tslib_1.__importDefault)(require("os"));
const path_1 = (0, tslib_1.__importDefault)(require("path"));
const MongoBinaryDownload_1 = (0, tslib_1.__importDefault)(require("./MongoBinaryDownload"));
const resolveConfig_1 = (0, tslib_1.__importStar)(require("./resolveConfig"));
const debug_1 = (0, tslib_1.__importDefault)(require("debug"));
const semver = (0, tslib_1.__importStar)(require("semver"));
const os_1 = tslib_1.__importDefault(require("os"));
const path_1 = tslib_1.__importDefault(require("path"));
const MongoBinaryDownload_1 = tslib_1.__importDefault(require("./MongoBinaryDownload"));
const resolveConfig_1 = tslib_1.__importStar(require("./resolveConfig"));
const debug_1 = tslib_1.__importDefault(require("debug"));
const semver = tslib_1.__importStar(require("semver"));
const utils_1 = require("./utils");

@@ -26,36 +26,34 @@ const child_process_1 = require("child_process");

*/
static download(options) {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
log('download');
const { downloadDir, version } = options;
// create downloadDir
yield (0, utils_1.mkdir)(downloadDir);
/** Lockfile path */
const lockfile = path_1.default.resolve(downloadDir, `${version}.lock`);
log(`download: Waiting to acquire Download lock for file "${lockfile}"`);
// wait to get a lock
            // downloading binaries may be quite a long procedure,
            // which is why we are using such big wait/stale periods
const lock = yield lockfile_1.LockFile.lock(lockfile);
log('download: Download lock acquired');
// this is to ensure that the lockfile gets removed in case of an error
try {
// check cache if it got already added to the cache
if (!DryMongoBinary_1.DryMongoBinary.binaryCache.has(version)) {
log(`download: Adding version ${version} to cache`);
const downloader = new MongoBinaryDownload_1.default(options);
DryMongoBinary_1.DryMongoBinary.binaryCache.set(version, yield downloader.getMongodPath());
}
static async download(options) {
log('download');
const { downloadDir, version } = options;
// create downloadDir
await (0, utils_1.mkdir)(downloadDir);
/** Lockfile path */
const lockfile = path_1.default.resolve(downloadDir, `${version}.lock`);
log(`download: Waiting to acquire Download lock for file "${lockfile}"`);
// wait to get a lock
        // downloading binaries may be quite a long procedure,
        // which is why we are using such big wait/stale periods
const lock = await lockfile_1.LockFile.lock(lockfile);
log('download: Download lock acquired');
// this is to ensure that the lockfile gets removed in case of an error
try {
// check cache if it got already added to the cache
if (!DryMongoBinary_1.DryMongoBinary.binaryCache.has(version)) {
log(`download: Adding version ${version} to cache`);
const downloader = new MongoBinaryDownload_1.default(options);
DryMongoBinary_1.DryMongoBinary.binaryCache.set(version, await downloader.getMongodPath());
}
finally {
log('download: Removing Download lock');
// remove lock
yield lock.unlock();
log('download: Download lock removed');
}
const cachePath = DryMongoBinary_1.DryMongoBinary.binaryCache.get(version);
// ensure that "path" exists, so the return type does not change
(0, utils_1.assertion)(typeof cachePath === 'string', new Error(`No Cache Path for version "${version}" found (and download failed silently?)`));
return cachePath;
});
}
finally {
log('download: Removing Download lock');
// remove lock
await lock.unlock();
log('download: Download lock removed');
}
const cachePath = DryMongoBinary_1.DryMongoBinary.binaryCache.get(version);
// ensure that "path" exists, so the return type does not change
(0, utils_1.assertion)(typeof cachePath === 'string', new Error(`No Cache Path for version "${version}" found (and download failed silently?)`));
return cachePath;
}

@@ -68,53 +66,55 @@ /**

*/
static getPath(opts = {}) {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
log('getPath');
// "||" is still used here, because it should default if the value is false-y (like an empty string)
const options = Object.assign(Object.assign({}, (yield DryMongoBinary_1.DryMongoBinary.generateOptions(opts))), { platform: opts.platform || (0, resolveConfig_1.default)(resolveConfig_1.ResolveConfigVariables.PLATFORM) || os_1.default.platform(), checkMD5: opts.checkMD5 || (0, resolveConfig_1.envToBool)((0, resolveConfig_1.default)(resolveConfig_1.ResolveConfigVariables.MD5_CHECK)) });
log(`getPath: MongoBinary options:`, JSON.stringify(options, null, 2));
let binaryPath = yield DryMongoBinary_1.DryMongoBinary.locateBinary(options);
// check if the system binary has the same version as requested
if (!!options.systemBinary) {
// this case should actually never be false, because if "SYSTEM_BINARY" is set, "locateBinary" will run "getSystemPath" which tests the path for permissions
if (!(0, utils_1.isNullOrUndefined)(binaryPath)) {
log(`getPath: Spawning binaryPath "${binaryPath}" to get version`);
const spawnOutput = (0, child_process_1.spawnSync)(binaryPath, ['--version'])
.stdout.toString()
// this regex is to match the first line of the "mongod --version" output "db version v4.0.25" OR "db version v4.2.19-11-ge2f2736"
.match(/^\s*db\s+version\s+v?(\d+\.\d+\.\d+)(-\d*)?(-[a-zA-Z0-9].*)?\s*$/im);
                    (0, utils_1.assertion)(!(0, utils_1.isNullOrUndefined)(spawnOutput), new Error('Could not find a version in the system binary output!'));
                    // don't warn about mismatched versions if "SYSTEM_BINARY_VERSION_CHECK" is false, but still test whether the binary can be executed
if ((0, resolveConfig_1.envToBool)((0, resolveConfig_1.default)(resolveConfig_1.ResolveConfigVariables.SYSTEM_BINARY_VERSION_CHECK))) {
log('getPath: Checking & Warning about version conflicts');
const binaryVersion = spawnOutput[1];
if (semver.neq(options.version, binaryVersion)) {
// we will log the version number of the system binary and the version requested so the user can see the difference
console.warn('getPath: MongoMemoryServer: Possible version conflict\n' +
` SystemBinary version: "${binaryVersion}"\n` +
` Requested version: "${options.version}"\n\n` +
' Using SystemBinary!');
}
static async getPath(opts = {}) {
log('getPath');
// "||" is still used here, because it should default if the value is false-y (like an empty string)
const options = {
...(await DryMongoBinary_1.DryMongoBinary.generateOptions(opts)),
platform: opts.platform || (0, resolveConfig_1.default)(resolveConfig_1.ResolveConfigVariables.PLATFORM) || os_1.default.platform(),
checkMD5: opts.checkMD5 || (0, resolveConfig_1.envToBool)((0, resolveConfig_1.default)(resolveConfig_1.ResolveConfigVariables.MD5_CHECK)),
};
log(`getPath: MongoBinary options:`, JSON.stringify(options, null, 2));
let binaryPath = await DryMongoBinary_1.DryMongoBinary.locateBinary(options);
// check if the system binary has the same version as requested
if (!!options.systemBinary) {
// this case should actually never be false, because if "SYSTEM_BINARY" is set, "locateBinary" will run "getSystemPath" which tests the path for permissions
if (!(0, utils_1.isNullOrUndefined)(binaryPath)) {
log(`getPath: Spawning binaryPath "${binaryPath}" to get version`);
const spawnOutput = (0, child_process_1.spawnSync)(binaryPath, ['--version'])
.stdout.toString()
// this regex is to match the first line of the "mongod --version" output "db version v4.0.25" OR "db version v4.2.19-11-ge2f2736"
.match(/^\s*db\s+version\s+v?(\d+\.\d+\.\d+)(-\d*)?(-[a-zA-Z0-9].*)?\s*$/im);
                (0, utils_1.assertion)(!(0, utils_1.isNullOrUndefined)(spawnOutput), new Error('Could not find a version in the system binary output!'));
                // don't warn about mismatched versions if "SYSTEM_BINARY_VERSION_CHECK" is false, but still test whether the binary can be executed
if ((0, resolveConfig_1.envToBool)((0, resolveConfig_1.default)(resolveConfig_1.ResolveConfigVariables.SYSTEM_BINARY_VERSION_CHECK))) {
log('getPath: Checking & Warning about version conflicts');
const binaryVersion = spawnOutput[1];
if (semver.neq(options.version, binaryVersion)) {
// we will log the version number of the system binary and the version requested so the user can see the difference
console.warn('getPath: MongoMemoryServer: Possible version conflict\n' +
` SystemBinary version: "${binaryVersion}"\n` +
` Requested version: "${options.version}"\n\n` +
' Using SystemBinary!');
}
}
else {
throw new Error('Option "SYSTEM_BINARY" was set, but binaryPath was empty! (system binary could not be found?) [This Error should normally not be thrown, please report this]');
}
}
            (0, utils_1.assertion)(typeof options.version === 'string', new Error('"MongoBinary.options.version" is not a string!'));
if (!binaryPath) {
if ((0, resolveConfig_1.envToBool)((0, resolveConfig_1.default)(resolveConfig_1.ResolveConfigVariables.RUNTIME_DOWNLOAD))) {
log('getPath: "RUNTIME_DOWNLOAD" is "true", trying to download');
binaryPath = yield this.download(options);
}
else {
log('getPath: "RUNTIME_DOWNLOAD" is "false", not downloading');
}
else {
throw new Error('Option "SYSTEM_BINARY" was set, but binaryPath was empty! (system binary could not be found?) [This Error should normally not be thrown, please report this]');
}
if (!binaryPath) {
const runtimeDownload = (0, resolveConfig_1.envToBool)((0, resolveConfig_1.default)(resolveConfig_1.ResolveConfigVariables.RUNTIME_DOWNLOAD));
                throw new Error(`MongoBinary.getPath: could not find a valid binary path! (Got: "${binaryPath}", RUNTIME_DOWNLOAD: "${runtimeDownload}")`);
}
        (0, utils_1.assertion)(typeof options.version === 'string', new Error('"MongoBinary.options.version" is not a string!'));
if (!binaryPath) {
if ((0, resolveConfig_1.envToBool)((0, resolveConfig_1.default)(resolveConfig_1.ResolveConfigVariables.RUNTIME_DOWNLOAD))) {
log('getPath: "RUNTIME_DOWNLOAD" is "true", trying to download');
binaryPath = await this.download(options);
}
log(`getPath: Mongod binary path: "${binaryPath}"`);
return binaryPath;
});
else {
log('getPath: "RUNTIME_DOWNLOAD" is "false", not downloading');
}
}
if (!binaryPath) {
const runtimeDownload = (0, resolveConfig_1.envToBool)((0, resolveConfig_1.default)(resolveConfig_1.ResolveConfigVariables.RUNTIME_DOWNLOAD));
            throw new Error(`MongoBinary.getPath: could not find a valid binary path! (Got: "${binaryPath}", RUNTIME_DOWNLOAD: "${runtimeDownload}")`);
}
log(`getPath: Mongod binary path: "${binaryPath}"`);
return binaryPath;
}
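// Editor's note: a hedged sketch of controlling runtime downloads around "MongoBinary.getPath";
// the "MONGOMS_RUNTIME_DOWNLOAD" env name and the import path are assumptions based on the
// package's usual conventions.
import { MongoBinary } from 'mongodb-memory-server-core/lib/util/MongoBinary';

async function exampleGetPath(): Promise<void> {
  process.env.MONGOMS_RUNTIME_DOWNLOAD = 'false'; // never download; only use already-present binaries
  // throws "could not find a valid binary path" when nothing is found and downloads are disabled
  const mongodPath = await MongoBinary.getPath({ version: '6.0.4' });
  console.log('mongod at:', mongodPath);
}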

@@ -121,0 +121,0 @@ }

/// <reference types="node" />
/// <reference types="node" />
import { URL } from 'url';

@@ -16,15 +17,5 @@ import { MongoBinaryOpts } from './MongoBinary';

dlProgress: MongoBinaryDownloadProgress;
_downloadingUrl?: string;
protected _downloadingUrl?: string;
/** These options are kind of raw, they are not run through DryMongoBinary.generateOptions */
binaryOpts: Required<MongoBinaryOpts>;
get checkMD5(): boolean;
set checkMD5(val: boolean);
get downloadDir(): string;
set downloadDir(val: string);
get arch(): string;
set arch(val: string);
get version(): string;
set version(val: string);
get platform(): string;
set platform(val: string);
constructor(opts: MongoBinaryOpts);

@@ -31,0 +22,0 @@ /**

@@ -5,15 +5,14 @@ "use strict";

const tslib_1 = require("tslib");
const os_1 = (0, tslib_1.__importDefault)(require("os"));
const os_1 = tslib_1.__importDefault(require("os"));
const url_1 = require("url");
const path_1 = (0, tslib_1.__importDefault)(require("path"));
const path_1 = tslib_1.__importDefault(require("path"));
const fs_1 = require("fs");
const md5_file_1 = (0, tslib_1.__importDefault)(require("md5-file"));
const follow_redirects_1 = require("follow-redirects");
const zlib_1 = require("zlib");
const tar_stream_1 = (0, tslib_1.__importDefault)(require("tar-stream"));
const yauzl_1 = (0, tslib_1.__importDefault)(require("yauzl"));
const MongoBinaryDownloadUrl_1 = (0, tslib_1.__importDefault)(require("./MongoBinaryDownloadUrl"));
const tar_stream_1 = tslib_1.__importDefault(require("tar-stream"));
const yauzl_1 = tslib_1.__importDefault(require("yauzl"));
const MongoBinaryDownloadUrl_1 = tslib_1.__importDefault(require("./MongoBinaryDownloadUrl"));
const https_proxy_agent_1 = require("https-proxy-agent");
const resolveConfig_1 = (0, tslib_1.__importStar)(require("./resolveConfig"));
const debug_1 = (0, tslib_1.__importDefault)(require("debug"));
const resolveConfig_1 = tslib_1.__importStar(require("./resolveConfig"));
const debug_1 = tslib_1.__importDefault(require("debug"));
const utils_1 = require("./utils");

@@ -28,17 +27,15 @@ const DryMongoBinary_1 = require("./DryMongoBinary");

class MongoBinaryDownload {
// end get/set backwards compat section
constructor(opts) {
var _a, _b, _c, _d, _e, _f;
(0, utils_1.assertion)(typeof opts.downloadDir === 'string', new Error('An DownloadDir must be specified!'));
const version = (_a = opts.version) !== null && _a !== void 0 ? _a : (0, resolveConfig_1.default)(resolveConfig_1.ResolveConfigVariables.VERSION);
const version = opts.version ?? (0, resolveConfig_1.default)(resolveConfig_1.ResolveConfigVariables.VERSION);
(0, utils_1.assertion)(typeof version === 'string', new Error('An MongoDB Binary version must be specified!'));
// DryMongoBinary.generateOptions cannot be used here, because it's async
this.binaryOpts = {
platform: (_b = opts.platform) !== null && _b !== void 0 ? _b : os_1.default.platform(),
arch: (_c = opts.arch) !== null && _c !== void 0 ? _c : os_1.default.arch(),
platform: opts.platform ?? os_1.default.platform(),
arch: opts.arch ?? os_1.default.arch(),
version: version,
downloadDir: opts.downloadDir,
checkMD5: (_d = opts.checkMD5) !== null && _d !== void 0 ? _d : (0, resolveConfig_1.envToBool)((0, resolveConfig_1.default)(resolveConfig_1.ResolveConfigVariables.MD5_CHECK)),
systemBinary: (_e = opts.systemBinary) !== null && _e !== void 0 ? _e : '',
os: (_f = opts.os) !== null && _f !== void 0 ? _f : { os: 'unknown' },
checkMD5: opts.checkMD5 ?? (0, resolveConfig_1.envToBool)((0, resolveConfig_1.default)(resolveConfig_1.ResolveConfigVariables.MD5_CHECK)),
systemBinary: opts.systemBinary ?? '',
os: opts.os ?? { os: 'unknown' },
};

@@ -52,34 +49,2 @@ this.dlProgress = {

}
// TODO: for a major version, remove the compat get/set
// the following get/set exist only to not break existing usage
get checkMD5() {
return this.binaryOpts.checkMD5;
}
set checkMD5(val) {
this.binaryOpts.checkMD5 = val;
}
get downloadDir() {
return this.binaryOpts.downloadDir;
}
set downloadDir(val) {
this.binaryOpts.downloadDir = val;
}
get arch() {
return this.binaryOpts.arch;
}
set arch(val) {
this.binaryOpts.arch = val;
}
get version() {
return this.binaryOpts.version;
}
set version(val) {
this.binaryOpts.version = val;
}
get platform() {
return this.binaryOpts.platform;
}
set platform(val) {
this.binaryOpts.platform = val;
}
/**

@@ -89,7 +54,5 @@ * Get the full path with filename

*/
getPath() {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
const opts = yield DryMongoBinary_1.DryMongoBinary.generateOptions(this.binaryOpts);
return DryMongoBinary_1.DryMongoBinary.combineBinaryName(this.downloadDir, yield DryMongoBinary_1.DryMongoBinary.getBinaryName(opts));
});
async getPath() {
const opts = await DryMongoBinary_1.DryMongoBinary.generateOptions(this.binaryOpts);
return DryMongoBinary_1.DryMongoBinary.combineBinaryName(this.binaryOpts.downloadDir, await DryMongoBinary_1.DryMongoBinary.getBinaryName(opts));
}

@@ -100,18 +63,16 @@ /**

*/
getMongodPath() {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
log('getMongodPath');
const mongodPath = yield this.getPath();
if (yield (0, utils_1.pathExists)(mongodPath)) {
log(`getMongodPath: mongod path "${mongodPath}" already exists, using this`);
return mongodPath;
}
const mongoDBArchive = yield this.startDownload();
yield this.extract(mongoDBArchive);
yield fs_1.promises.unlink(mongoDBArchive);
if (yield (0, utils_1.pathExists)(mongodPath)) {
return mongodPath;
}
throw new Error(`Cannot find downloaded mongod binary by path "${mongodPath}"`);
});
async getMongodPath() {
log('getMongodPath');
const mongodPath = await this.getPath();
if (await (0, utils_1.pathExists)(mongodPath)) {
log(`getMongodPath: mongod path "${mongodPath}" already exists, using this`);
return mongodPath;
}
const mongoDBArchive = await this.startDownload();
await this.extract(mongoDBArchive);
await fs_1.promises.unlink(mongoDBArchive);
if (await (0, utils_1.pathExists)(mongodPath)) {
return mongodPath;
}
throw new Error(`Cannot find downloaded mongod binary by path "${mongodPath}"`);
}

@@ -122,20 +83,18 @@ /**

*/
startDownload() {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
log('startDownload');
const mbdUrl = new MongoBinaryDownloadUrl_1.default(this.binaryOpts);
yield (0, utils_1.mkdir)(this.downloadDir);
try {
yield fs_1.promises.access(this.downloadDir, fs_1.constants.X_OK | fs_1.constants.W_OK); // check that this process has permissions to create files & modify file contents & read file contents
}
catch (err) {
console.error(`Download Directory at "${this.downloadDir}" does not have sufficient permissions to be used by this process\n` +
'Needed Permissions: Write & Execute (-wx)\n');
throw err;
}
const downloadUrl = yield mbdUrl.getDownloadUrl();
const mongoDBArchive = yield this.download(downloadUrl);
yield this.makeMD5check(`${downloadUrl}.md5`, mongoDBArchive);
return mongoDBArchive;
});
async startDownload() {
log('startDownload');
const mbdUrl = new MongoBinaryDownloadUrl_1.default(this.binaryOpts);
await (0, utils_1.mkdir)(this.binaryOpts.downloadDir);
try {
await fs_1.promises.access(this.binaryOpts.downloadDir, fs_1.constants.X_OK | fs_1.constants.W_OK); // check that this process has permissions to create files & modify file contents & read file contents
}
catch (err) {
console.error(`Download Directory at "${this.binaryOpts.downloadDir}" does not have sufficient permissions to be used by this process\n` +
'Needed Permissions: Write & Execute (-wx)\n');
throw err;
}
const downloadUrl = await mbdUrl.getDownloadUrl();
const mongoDBArchive = await this.download(downloadUrl);
await this.makeMD5check(`${downloadUrl}.md5`, mongoDBArchive);
return mongoDBArchive;
}

@@ -151,21 +110,19 @@ /**

*/
makeMD5check(urlForReferenceMD5, mongoDBArchive) {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
log('makeMD5check: Checking MD5 of downloaded binary...');
if (!this.checkMD5) {
log('makeMD5check: checkMD5 is disabled');
return undefined;
}
const archiveMD5Path = yield this.download(urlForReferenceMD5);
const signatureContent = (yield fs_1.promises.readFile(archiveMD5Path)).toString('utf-8');
const regexMatch = signatureContent.match(/^\s*([\w\d]+)\s*/i);
const md5SigRemote = regexMatch ? regexMatch[1] : null;
const md5SigLocal = md5_file_1.default.sync(mongoDBArchive);
log(`makeMD5check: Local MD5: ${md5SigLocal}, Remote MD5: ${md5SigRemote}`);
if (md5SigRemote !== md5SigLocal) {
throw new errors_1.Md5CheckFailedError(md5SigLocal, md5SigRemote || 'unknown');
}
yield fs_1.promises.unlink(archiveMD5Path);
return true;
});
async makeMD5check(urlForReferenceMD5, mongoDBArchive) {
log('makeMD5check: Checking MD5 of downloaded binary...');
if (!this.binaryOpts.checkMD5) {
log('makeMD5check: checkMD5 is disabled');
return undefined;
}
const archiveMD5Path = await this.download(urlForReferenceMD5);
const signatureContent = (await fs_1.promises.readFile(archiveMD5Path)).toString('utf-8');
const regexMatch = signatureContent.match(/^\s*([\w\d]+)\s*/i);
const md5SigRemote = regexMatch ? regexMatch[1] : null;
const md5SigLocal = await (0, utils_1.md5FromFile)(mongoDBArchive);
log(`makeMD5check: Local MD5: ${md5SigLocal}, Remote MD5: ${md5SigRemote}`);
if (md5SigRemote !== md5SigLocal) {
throw new errors_1.Md5CheckFailedError(md5SigLocal, md5SigRemote || 'unknown');
}
await fs_1.promises.unlink(archiveMD5Path);
return true;
}
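makeMD5check above fetches the sibling `.md5` file, hashes the downloaded archive with the new `md5FromFile` util, and throws `Md5CheckFailedError` on mismatch; the whole check is skipped when `checkMD5` resolves to false. A small sketch of toggling it via configuration, assuming the `MONGOMS_MD5_CHECK` environment name:

```ts
// Sketch only, assuming the "MONGOMS_MD5_CHECK" env name; MD5_CHECK defaults to 'true'
// in this version's resolveConfig defaults. Disabling it skips the extra ".md5"
// download and the local/remote hash comparison in makeMD5check.
process.env.MONGOMS_MD5_CHECK = 'false';
```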

@@ -177,37 +134,35 @@ /**

*/
download(downloadUrl) {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
log('download');
const proxy = process.env['yarn_https-proxy'] ||
process.env.yarn_proxy ||
process.env['npm_config_https-proxy'] ||
process.env.npm_config_proxy ||
process.env.https_proxy ||
process.env.http_proxy ||
process.env.HTTPS_PROXY ||
process.env.HTTP_PROXY;
const strictSsl = process.env.npm_config_strict_ssl === 'true';
const urlObject = new url_1.URL(downloadUrl);
urlObject.port = urlObject.port || '443';
const requestOptions = {
method: 'GET',
rejectUnauthorized: strictSsl,
protocol: (0, resolveConfig_1.envToBool)((0, resolveConfig_1.default)(resolveConfig_1.ResolveConfigVariables.USE_HTTP)) ? 'http:' : 'https:',
agent: proxy ? new https_proxy_agent_1.HttpsProxyAgent(proxy) : undefined,
};
const filename = urlObject.pathname.split('/').pop();
if (!filename) {
throw new Error(`MongoBinaryDownload: missing filename for url "${downloadUrl}"`);
}
const downloadLocation = path_1.default.resolve(this.downloadDir, filename);
const tempDownloadLocation = path_1.default.resolve(this.downloadDir, `${filename}.downloading`);
log(`download: Downloading${proxy ? ` via proxy "${proxy}"` : ''}: "${downloadUrl}"`);
if (yield (0, utils_1.pathExists)(downloadLocation)) {
log('download: Already downloaded archive found, skipping download');
return downloadLocation;
}
this.assignDownloadingURL(urlObject);
const downloadedFile = yield this.httpDownload(urlObject, requestOptions, downloadLocation, tempDownloadLocation);
return downloadedFile;
});
async download(downloadUrl) {
log('download');
const proxy = process.env['yarn_https-proxy'] ||
process.env.yarn_proxy ||
process.env['npm_config_https-proxy'] ||
process.env.npm_config_proxy ||
process.env.https_proxy ||
process.env.http_proxy ||
process.env.HTTPS_PROXY ||
process.env.HTTP_PROXY;
const strictSsl = process.env.npm_config_strict_ssl === 'true';
const urlObject = new url_1.URL(downloadUrl);
urlObject.port = urlObject.port || '443';
const requestOptions = {
method: 'GET',
rejectUnauthorized: strictSsl,
protocol: (0, resolveConfig_1.envToBool)((0, resolveConfig_1.default)(resolveConfig_1.ResolveConfigVariables.USE_HTTP)) ? 'http:' : 'https:',
agent: proxy ? new https_proxy_agent_1.HttpsProxyAgent(proxy) : undefined,
};
const filename = urlObject.pathname.split('/').pop();
if (!filename) {
throw new Error(`MongoBinaryDownload: missing filename for url "${downloadUrl}"`);
}
const downloadLocation = path_1.default.resolve(this.binaryOpts.downloadDir, filename);
const tempDownloadLocation = path_1.default.resolve(this.binaryOpts.downloadDir, `${filename}.downloading`);
log(`download: Downloading${proxy ? ` via proxy "${proxy}"` : ''}: "${downloadUrl}"`);
if (await (0, utils_1.pathExists)(downloadLocation)) {
log('download: Already downloaded archive found, skipping download');
return downloadLocation;
}
this.assignDownloadingURL(urlObject);
const downloadedFile = await this.httpDownload(urlObject, requestOptions, downloadLocation, tempDownloadLocation);
return downloadedFile;
}
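The download method above resolves a proxy from the usual yarn/npm/environment variables (the first non-empty value wins) and hands it to `HttpsProxyAgent`, while `npm_config_strict_ssl` controls certificate validation. A sketch of running the binary download behind a corporate proxy, with a hypothetical proxy URL:

```ts
// Sketch: these variables are read by the download() method above; the proxy URL is hypothetical.
process.env.HTTPS_PROXY = 'http://proxy.corp.example:3128';
// Strict TLS verification is only enabled when npm passes this through as 'true':
process.env.npm_config_strict_ssl = 'true';
// Optional, assumed "MONGOMS_USE_HTTP" name: switch the request protocol to plain HTTP.
// process.env.MONGOMS_USE_HTTP = 'true';
```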

@@ -219,24 +174,21 @@ /**

*/
extract(mongoDBArchive) {
var _a, _b;
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
log('extract');
const mongodbFullPath = yield this.getPath();
log(`extract: archive: "${mongoDBArchive}" final: "${mongodbFullPath}"`);
yield (0, utils_1.mkdir)(path_1.default.dirname(mongodbFullPath));
const filter = (file) => /(?:bin\/(?:mongod(?:\.exe)?))$/i.test(file);
if (/(.tar.gz|.tgz)$/.test(mongoDBArchive)) {
yield this.extractTarGz(mongoDBArchive, mongodbFullPath, filter);
}
else if (/.zip$/.test(mongoDBArchive)) {
yield this.extractZip(mongoDBArchive, mongodbFullPath, filter);
}
else {
throw new Error(`MongoBinaryDownload: unsupported archive "${mongoDBArchive}" (downloaded from "${(_a = this._downloadingUrl) !== null && _a !== void 0 ? _a : 'unknown'}"). Broken archive from MongoDB Provider?`);
}
if (!(yield (0, utils_1.pathExists)(mongodbFullPath))) {
throw new Error(`MongoBinaryDownload: missing mongod binary in "${mongoDBArchive}" (downloaded from "${(_b = this._downloadingUrl) !== null && _b !== void 0 ? _b : 'unknown'}"). Broken archive from MongoDB Provider?`);
}
return mongodbFullPath;
});
async extract(mongoDBArchive) {
log('extract');
const mongodbFullPath = await this.getPath();
log(`extract: archive: "${mongoDBArchive}" final: "${mongodbFullPath}"`);
await (0, utils_1.mkdir)(path_1.default.dirname(mongodbFullPath));
const filter = (file) => /(?:bin\/(?:mongod(?:\.exe)?))$/i.test(file);
if (/(.tar.gz|.tgz)$/.test(mongoDBArchive)) {
await this.extractTarGz(mongoDBArchive, mongodbFullPath, filter);
}
else if (/.zip$/.test(mongoDBArchive)) {
await this.extractZip(mongoDBArchive, mongodbFullPath, filter);
}
else {
throw new Error(`MongoBinaryDownload: unsupported archive "${mongoDBArchive}" (downloaded from "${this._downloadingUrl ?? 'unknown'}"). Broken archive from MongoDB Provider?`);
}
if (!(await (0, utils_1.pathExists)(mongodbFullPath))) {
throw new Error(`MongoBinaryDownload: missing mongod binary in "${mongoDBArchive}" (downloaded from "${this._downloadingUrl ?? 'unknown'}"). Broken archive from MongoDB Provider?`);
}
return mongodbFullPath;
}

@@ -249,31 +201,29 @@ /**

*/
extractTarGz(mongoDBArchive, extractPath, filter) {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
log('extractTarGz');
const extract = tar_stream_1.default.extract();
extract.on('entry', (header, stream, next) => {
if (filter(header.name)) {
stream.pipe((0, fs_1.createWriteStream)(extractPath, {
mode: 0o775,
}));
}
stream.on('end', () => next());
stream.resume();
});
return new Promise((res, rej) => {
(0, fs_1.createReadStream)(mongoDBArchive)
.on('error', (err) => {
rej(new errors_1.GenericMMSError('Unable to open tarball ' + mongoDBArchive + ': ' + err));
})
.pipe((0, zlib_1.createUnzip)())
.on('error', (err) => {
rej(new errors_1.GenericMMSError('Error during unzip for ' + mongoDBArchive + ': ' + err));
})
.pipe(extract)
.on('error', (err) => {
rej(new errors_1.GenericMMSError('Error during untar for ' + mongoDBArchive + ': ' + err));
})
.on('finish', res);
});
async extractTarGz(mongoDBArchive, extractPath, filter) {
log('extractTarGz');
const extract = tar_stream_1.default.extract();
extract.on('entry', (header, stream, next) => {
if (filter(header.name)) {
stream.pipe((0, fs_1.createWriteStream)(extractPath, {
mode: 0o775,
}));
}
stream.on('end', () => next());
stream.resume();
});
return new Promise((res, rej) => {
(0, fs_1.createReadStream)(mongoDBArchive)
.on('error', (err) => {
rej(new errors_1.GenericMMSError('Unable to open tarball ' + mongoDBArchive + ': ' + err));
})
.pipe((0, zlib_1.createUnzip)())
.on('error', (err) => {
rej(new errors_1.GenericMMSError('Error during unzip for ' + mongoDBArchive + ': ' + err));
})
.pipe(extract)
.on('error', (err) => {
rej(new errors_1.GenericMMSError('Error during untar for ' + mongoDBArchive + ': ' + err));
})
.on('finish', res);
});
}

@@ -286,25 +236,23 @@ /**

*/
extractZip(mongoDBArchive, extractPath, filter) {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
log('extractZip');
return new Promise((resolve, reject) => {
yauzl_1.default.open(mongoDBArchive, { lazyEntries: true }, (err, zipfile) => {
if (err || !zipfile) {
return reject(err);
async extractZip(mongoDBArchive, extractPath, filter) {
log('extractZip');
return new Promise((resolve, reject) => {
yauzl_1.default.open(mongoDBArchive, { lazyEntries: true }, (err, zipfile) => {
if (err || !zipfile) {
return reject(err);
}
zipfile.readEntry();
zipfile.on('end', () => resolve());
zipfile.on('entry', (entry) => {
if (!filter(entry.fileName)) {
return zipfile.readEntry();
}
zipfile.readEntry();
zipfile.on('end', () => resolve());
zipfile.on('entry', (entry) => {
if (!filter(entry.fileName)) {
return zipfile.readEntry();
zipfile.openReadStream(entry, (err2, r) => {
if (err2 || !r) {
return reject(err2);
}
zipfile.openReadStream(entry, (err2, r) => {
if (err2 || !r) {
return reject(err2);
}
r.on('end', () => zipfile.readEntry());
r.pipe((0, fs_1.createWriteStream)(extractPath, {
mode: 0o775,
}));
});
r.on('end', () => zipfile.readEntry());
r.pipe((0, fs_1.createWriteStream)(extractPath, {
mode: 0o775,
}));
});

@@ -321,55 +269,55 @@ });

*/
httpDownload(url, httpOptions, downloadLocation, tempDownloadLocation) {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
log('httpDownload');
const downloadUrl = this.assignDownloadingURL(url);
const maxRedirects = parseInt((0, resolveConfig_1.default)(resolveConfig_1.ResolveConfigVariables.MAX_REDIRECTS) || '');
const useHttpsOptions = Object.assign({ maxRedirects: Number.isNaN(maxRedirects) ? 2 : maxRedirects }, httpOptions);
return new Promise((resolve, reject) => {
log(`httpDownload: trying to download "${downloadUrl}"`);
follow_redirects_1.https
.get(url, useHttpsOptions, (response) => {
if (response.statusCode != 200) {
if (response.statusCode === 403) {
reject(new errors_1.DownloadError(downloadUrl, "Status Code is 403 (MongoDB's 404)\n" +
"This means that the requested version-platform combination doesn't exist\n" +
"Try to use different version 'new MongoMemoryServer({ binary: { version: 'X.Y.Z' } })'\n" +
'List of available versions can be found here: ' +
'https://www.mongodb.com/download-center/community/releases/archive'));
return;
}
reject(new errors_1.DownloadError(downloadUrl, `Status Code isnt 200! (it is ${response.statusCode})`));
async httpDownload(url, httpOptions, downloadLocation, tempDownloadLocation) {
log('httpDownload');
const downloadUrl = this.assignDownloadingURL(url);
const maxRedirects = parseInt((0, resolveConfig_1.default)(resolveConfig_1.ResolveConfigVariables.MAX_REDIRECTS) || '');
const useHttpsOptions = {
maxRedirects: Number.isNaN(maxRedirects) ? 2 : maxRedirects,
...httpOptions,
};
return new Promise((resolve, reject) => {
log(`httpDownload: trying to download "${downloadUrl}"`);
follow_redirects_1.https
.get(url, useHttpsOptions, (response) => {
if (response.statusCode != 200) {
if (response.statusCode === 403) {
reject(new errors_1.DownloadError(downloadUrl, "Status Code is 403 (MongoDB's 404)\n" +
"This means that the requested version-platform combination doesn't exist\n" +
"Try to use different version 'new MongoMemoryServer({ binary: { version: 'X.Y.Z' } })'\n" +
'List of available versions can be found here: ' +
'https://www.mongodb.com/download-center/community/releases/archive'));
return;
}
if (typeof response.headers['content-length'] != 'string') {
reject(new errors_1.DownloadError(downloadUrl, 'Response header "content-length" is empty!'));
reject(new errors_1.DownloadError(downloadUrl, `Status Code isnt 200! (it is ${response.statusCode})`));
return;
}
if (typeof response.headers['content-length'] != 'string') {
reject(new errors_1.DownloadError(downloadUrl, 'Response header "content-length" is empty!'));
return;
}
this.dlProgress.current = 0;
this.dlProgress.length = parseInt(response.headers['content-length'], 10);
this.dlProgress.totalMb = Math.round((this.dlProgress.length / 1048576) * 10) / 10;
const fileStream = (0, fs_1.createWriteStream)(tempDownloadLocation);
response.pipe(fileStream);
fileStream.on('finish', async () => {
if (this.dlProgress.current < this.dlProgress.length &&
!httpOptions.path?.endsWith('.md5')) {
reject(new errors_1.DownloadError(downloadUrl, `Too small (${this.dlProgress.current} bytes) mongod binary downloaded.`));
return;
}
this.dlProgress.current = 0;
this.dlProgress.length = parseInt(response.headers['content-length'], 10);
this.dlProgress.totalMb = Math.round((this.dlProgress.length / 1048576) * 10) / 10;
const fileStream = (0, fs_1.createWriteStream)(tempDownloadLocation);
response.pipe(fileStream);
fileStream.on('finish', () => (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
var _a;
if (this.dlProgress.current < this.dlProgress.length &&
!((_a = httpOptions.path) === null || _a === void 0 ? void 0 : _a.endsWith('.md5'))) {
reject(new errors_1.DownloadError(downloadUrl, `Too small (${this.dlProgress.current} bytes) mongod binary downloaded.`));
return;
}
this.printDownloadProgress({ length: 0 }, true);
fileStream.close();
yield fs_1.promises.rename(tempDownloadLocation, downloadLocation);
log(`httpDownload: moved "${tempDownloadLocation}" to "${downloadLocation}"`);
resolve(downloadLocation);
}));
response.on('data', (chunk) => {
this.printDownloadProgress(chunk);
});
})
.on('error', (err) => {
// log it without having debug enabled
console.error(`Couldnt download "${downloadUrl}"!`, err.message);
reject(new errors_1.DownloadError(downloadUrl, err.message));
this.printDownloadProgress({ length: 0 }, true);
fileStream.close();
await fs_1.promises.rename(tempDownloadLocation, downloadLocation);
log(`httpDownload: moved "${tempDownloadLocation}" to "${downloadLocation}"`);
resolve(downloadLocation);
});
response.on('data', (chunk) => {
this.printDownloadProgress(chunk);
});
})
.on('error', (err) => {
// log it without having debug enabled
console.error(`Couldnt download "${downloadUrl}"!`, err.message);
reject(new errors_1.DownloadError(downloadUrl, err.message));
});

@@ -391,4 +339,4 @@ });

const mbComplete = Math.round((this.dlProgress.current / 1048576) * 10) / 10;
const crReturn = this.platform === 'win32' ? '\x1b[0G' : '\r';
const message = `Downloading MongoDB "${this.version}": ${percentComplete}% (${mbComplete}mb / ${this.dlProgress.totalMb}mb)${crReturn}`;
const crReturn = this.binaryOpts.platform === 'win32' ? '\x1b[0G' : '\r';
const message = `Downloading MongoDB "${this.binaryOpts.version}": ${percentComplete}% (${mbComplete}mb / ${this.dlProgress.totalMb}mb)${crReturn}`;
if (process.stdout.isTTY) {

@@ -395,0 +343,0 @@ // if TTY overwrite last line over and over until finished and clear line to avoid residual characters

@@ -8,2 +8,4 @@ import { AnyOS, LinuxOS } from './getos';

}
/** Set the default ubuntu version number */
export declare const DEFAULT_UBUNTU_YEAR = 22;
/**

@@ -72,6 +74,2 @@ * Download URL generator

/**
* Linux Fallback
*/
getLegacyVersionString(): string;
/**
* Get the version string for Suse / OpenSuse

@@ -99,5 +97,5 @@ * @param os LinuxOS Object

*/
static translateArch(arch: string, mongoPlatform: string): string;
static translateArch(arch: string): string;
}
export default MongoBinaryDownloadUrl;
//# sourceMappingURL=MongoBinaryDownloadUrl.d.ts.map
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.MongoBinaryDownloadUrl = void 0;
exports.MongoBinaryDownloadUrl = exports.DEFAULT_UBUNTU_YEAR = void 0;
const tslib_1 = require("tslib");
const getos_1 = require("./getos");
const resolveConfig_1 = require("./resolveConfig");
const debug_1 = (0, tslib_1.__importDefault)(require("debug"));
const semver = (0, tslib_1.__importStar)(require("semver"));
const debug_1 = tslib_1.__importDefault(require("debug"));
const semver = tslib_1.__importStar(require("semver"));
const utils_1 = require("./utils");
const url_1 = require("url");
const errors_1 = require("./errors");
const util_1 = require("util");
const log = (0, debug_1.default)('MongoMS:MongoBinaryDownloadUrl');
/** Set the default ubuntu version number */
exports.DEFAULT_UBUNTU_YEAR = 22; // TODO: try to keep this up-to-date to the latest LTS
/**

@@ -21,3 +22,3 @@ * Download URL generator

this.platform = this.translatePlatform(opts.platform);
this.arch = MongoBinaryDownloadUrl.translateArch(opts.arch, this.platform);
this.arch = MongoBinaryDownloadUrl.translateArch(opts.arch);
this.os = opts.os;

@@ -29,24 +30,21 @@ }

*/
getDownloadUrl() {
var _a;
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
const downloadUrl = (0, resolveConfig_1.resolveConfig)(resolveConfig_1.ResolveConfigVariables.DOWNLOAD_URL);
if (downloadUrl) {
log(`Using "${downloadUrl}" as the Download-URL`);
const url = new url_1.URL(downloadUrl); // check if this is a valid url
return url.toString();
}
const archive = yield this.getArchiveName();
log(`Using "${archive}" as the Archive String`);
const mirror = (_a = (0, resolveConfig_1.resolveConfig)(resolveConfig_1.ResolveConfigVariables.DOWNLOAD_MIRROR)) !== null && _a !== void 0 ? _a : 'https://fastdl.mongodb.org';
log(`Using "${mirror}" as the mirror`);
const url = new url_1.URL(mirror);
// ensure that the "mirror" path ends with "/"
if (!url.pathname.endsWith('/')) {
url.pathname = url.pathname + '/';
}
// no extra "/" between "pathname" and "platfrom", because of the "if" statement above to ensure "url.pathname" to end with "/"
url.pathname = `${url.pathname}${this.platform}/${archive}`;
async getDownloadUrl() {
const downloadUrl = (0, resolveConfig_1.resolveConfig)(resolveConfig_1.ResolveConfigVariables.DOWNLOAD_URL);
if (downloadUrl) {
log(`Using "${downloadUrl}" as the Download-URL`);
const url = new url_1.URL(downloadUrl); // check if this is a valid url
return url.toString();
});
}
const archive = await this.getArchiveName();
log(`Using "${archive}" as the Archive String`);
const mirror = (0, resolveConfig_1.resolveConfig)(resolveConfig_1.ResolveConfigVariables.DOWNLOAD_MIRROR) ?? 'https://fastdl.mongodb.org';
log(`Using "${mirror}" as the mirror`);
const url = new url_1.URL(mirror);
// ensure that the "mirror" path ends with "/"
if (!url.pathname.endsWith('/')) {
url.pathname = url.pathname + '/';
}
// no extra "/" between "pathname" and "platform", because the "if" statement above ensures "url.pathname" ends with "/"
url.pathname = `${url.pathname}${this.platform}/${archive}`;
return url.toString();
}

@@ -56,21 +54,19 @@ /**

*/
getArchiveName() {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
const archive_name = (0, resolveConfig_1.resolveConfig)(resolveConfig_1.ResolveConfigVariables.ARCHIVE_NAME);
// double-"!" to not include falsy values
if (!!archive_name) {
return archive_name;
}
switch (this.platform) {
case 'osx':
return this.getArchiveNameOsx();
case 'win32':
case 'windows':
return this.getArchiveNameWin();
case 'linux':
return this.getArchiveNameLinux();
default:
throw new errors_1.UnknownPlatformError(this.platform);
}
});
async getArchiveName() {
const archive_name = (0, resolveConfig_1.resolveConfig)(resolveConfig_1.ResolveConfigVariables.ARCHIVE_NAME);
// double-"!" to not include falsy values
if (!!archive_name) {
return archive_name;
}
switch (this.platform) {
case 'osx':
return this.getArchiveNameOsx();
case 'win32':
case 'windows':
return this.getArchiveNameWin();
case 'linux':
return this.getArchiveNameLinux();
default:
throw new errors_1.UnknownPlatformError(this.platform);
}
}

@@ -128,23 +124,18 @@ /**

*/
getArchiveNameLinux() {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
let osString;
// the highest version for "i686" seems to be 3.3
if (this.arch !== 'i686') {
if (!this.os && (0, resolveConfig_1.resolveConfig)(resolveConfig_1.ResolveConfigVariables.DISTRO)) {
this.os = yield (0, getos_1.getOS)();
}
if ((0, resolveConfig_1.resolveConfig)(resolveConfig_1.ResolveConfigVariables.DISTRO)) {
this.overwriteDistro();
}
osString = this.getLinuxOSVersionString(this.os);
}
// this is below, to allow overwriting the arch (like arm64 to aarch64)
let name = `mongodb-linux-${this.arch}`;
if (!!osString) {
name += `-${osString}`;
}
name += `-${this.version}.tgz`;
return name;
});
async getArchiveNameLinux() {
if (!this.os && (0, resolveConfig_1.resolveConfig)(resolveConfig_1.ResolveConfigVariables.DISTRO)) {
this.os = await (0, getos_1.getOS)();
}
if ((0, resolveConfig_1.resolveConfig)(resolveConfig_1.ResolveConfigVariables.DISTRO)) {
this.overwriteDistro();
}
const osString = this.getLinuxOSVersionString(this.os);
// this is below, to allow overwriting the arch (like arm64 to aarch64)
let name = `mongodb-linux-${this.arch}`;
// guard against any falsy values
if (!!osString) {
name += `-${osString}`;
}
name += `-${this.version}.tgz`;
return name;
}

@@ -179,3 +170,2 @@ /**

getLinuxOSVersionString(os) {
var _a;
if (regexHelper(/ubuntu/i, os)) {

@@ -225,5 +215,4 @@ return this.getUbuntuVersionString(os);

}
// warn for the fallback
console.warn(`Unknown/unsupported linux "${os.dist}(${(_a = os.id_like) === null || _a === void 0 ? void 0 : _a.join(', ')})". Falling back to legacy MongoDB build!`);
return this.getLegacyVersionString();
// mongodb does not ship generic linux builds anymore
throw new errors_1.UnknownLinuxDistro(os.dist, os.id_like ?? []);
}

@@ -365,8 +354,2 @@ /**

/**
* Linux Fallback
*/
getLegacyVersionString() {
return '';
}
/**
* Get the version string for Suse / OpenSuse

@@ -430,3 +413,3 @@ * @param os LinuxOS Object

dist: 'ubuntu',
release: '20.04', // TODO: try to keep this up-to-date to the latest LTS
release: `${exports.DEFAULT_UBUNTU_YEAR}.04`,
};

@@ -438,3 +421,7 @@ }

}
const ubuntuYear = parseInt(ubuntuOS.release.split('.')[0], 10);
let ubuntuYear = parseInt(ubuntuOS.release.split('.')[0], 10);
if (Number.isNaN(ubuntuYear)) {
console.warn(`Could not parse ubuntu year from "${ubuntuOS.release}", using default`);
ubuntuYear = exports.DEFAULT_UBUNTU_YEAR;
}
if (this.arch === 'aarch64') {

@@ -470,5 +457,4 @@ // this is because, before version 4.1.10, everything for "arm64" / "aarch64" were just "arm64" and for "ubuntu1604"

}
// TODO: change or remove the "14" default, since it is no longer supported above 4.0
// the "04" version always exists for ubuntu, use that as default
return `ubuntu${ubuntuYear || 14}04`;
return `ubuntu${ubuntuYear}04`;
}

@@ -492,7 +478,3 @@ /**

case 'linux':
case 'elementary OS':
return 'linux';
case 'sunos':
(0, util_1.deprecate)(() => { }, 'mongodb-memory-server will fully drop support for sunos in 9.0', 'MMS002')();
return 'sunos5';
default:

@@ -508,13 +490,4 @@ throw new errors_1.UnknownPlatformError(platform);

*/
static translateArch(arch, mongoPlatform) {
static translateArch(arch) {
switch (arch) {
case 'ia32':
(0, util_1.deprecate)(() => { }, 'mongodb-memory-server will fully drop support for ia32 in 9.0', 'MMS001')();
if (mongoPlatform === 'linux') {
return 'i686';
}
else if (mongoPlatform === 'win32') {
return 'i386';
}
throw new errors_1.UnknownArchitectureError(arch, mongoPlatform);
case 'x86_64':

@@ -521,0 +494,0 @@ case 'x64':

/// <reference types="node" />
/// <reference types="node" />
/// <reference types="node" />
import { ChildProcess, SpawnOptions } from 'child_process';

@@ -7,3 +9,3 @@ import { MongoBinaryOpts } from './MongoBinary';

import { MongoClientOptions } from 'mongodb';
export declare type StorageEngine = 'devnull' | 'ephemeralForTest' | 'mmapv1' | 'wiredTiger';
export type StorageEngine = 'ephemeralForTest' | 'wiredTiger';
/**

@@ -10,0 +12,0 @@ * Overwrite replica member-specific configuration
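The `StorageEngine` union above drops `'devnull'` and `'mmapv1'`, leaving `'ephemeralForTest'` and `'wiredTiger'`. A minimal sketch of pinning the engine per instance, assuming the usual `instance.storageEngine` option on the server options:

```ts
import { MongoMemoryServer } from 'mongodb-memory-server-core';

// Minimal sketch: 'wiredTiger' persists to the dbPath, 'ephemeralForTest' keeps data in memory.
// 'devnull' and 'mmapv1' no longer type-check against the narrowed StorageEngine union.
export async function createWiredTigerServer(): Promise<MongoMemoryServer> {
  return MongoMemoryServer.create({
    instance: { storageEngine: 'wiredTiger' },
  });
}
```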

@@ -6,5 +6,5 @@ "use strict";

const child_process_1 = require("child_process");
const path = (0, tslib_1.__importStar)(require("path"));
const path = tslib_1.__importStar(require("path"));
const MongoBinary_1 = require("./MongoBinary");
const debug_1 = (0, tslib_1.__importDefault)(require("debug"));
const debug_1 = tslib_1.__importDefault(require("debug"));
const utils_1 = require("./utils");

@@ -17,4 +17,4 @@ const semver_1 = require("semver");

/* istanbul ignore next */
if ((0, semver_1.lt)(process.version, '12.22.0')) {
console.warn('Using NodeJS below 12.22.0');
if ((0, semver_1.lt)(process.version, '14.20.1')) {
console.warn('Using NodeJS below 14.20.1');
}

@@ -57,5 +57,5 @@ const log = (0, debug_1.default)('MongoMS:MongoInstance');

this.isReplSet = false;
this.instanceOpts = Object.assign({}, opts.instance);
this.binaryOpts = Object.assign({}, opts.binary);
this.spawnOpts = Object.assign({}, opts.spawn);
this.instanceOpts = { ...opts.instance };
this.binaryOpts = { ...opts.binary };
this.spawnOpts = { ...opts.spawn };
this.on(MongoInstanceEvents.instanceReady, () => {

@@ -65,8 +65,8 @@ this.isInstanceReady = true;

});
this.on(MongoInstanceEvents.instanceError, (err) => (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
this.on(MongoInstanceEvents.instanceError, async (err) => {
this.debug(`constructor: Instance has thrown an Error: ${err.toString()}`);
this.isInstanceReady = false;
this.isInstancePrimary = false;
yield this.stop();
}));
await this.stop();
});
}

@@ -78,4 +78,3 @@ /**

debug(msg, ...extra) {
var _a;
const port = (_a = this.instanceOpts.port) !== null && _a !== void 0 ? _a : 'unknown';
const port = this.instanceOpts.port ?? 'unknown';
log(`Mongo[${port}]: ${msg}`, ...extra);

@@ -87,9 +86,7 @@ }

*/
static create(opts) {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
log('create: Called .create() method');
const instance = new this(opts);
yield instance.start();
return instance;
});
static async create(opts) {
log('create: Called .create() method');
const instance = new this(opts);
await instance.start();
return instance;
}

@@ -100,3 +97,2 @@ /**

prepareCommandArgs() {
var _a;
this.debug('prepareCommandArgs');

@@ -129,3 +125,3 @@ (0, utils_1.assertion)(!(0, utils_1.isNullOrUndefined)(this.instanceOpts.port), new Error('"instanceOpts.port" is required to be set!'));

}
const final = result.concat((_a = this.instanceOpts.args) !== null && _a !== void 0 ? _a : []);
const final = result.concat(this.instanceOpts.args ?? []);
this.debug('prepareCommandArgs: final argument array:' + JSON.stringify(final));

@@ -138,40 +134,41 @@ return final;

*/
start() {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
this.debug('start');
this.isInstancePrimary = false;
this.isInstanceReady = false;
this.isReplSet = false;
let timeout;
const mongoBin = yield MongoBinary_1.MongoBinary.getPath(this.binaryOpts);
yield (0, utils_1.checkBinaryPermissions)(mongoBin);
const launch = new Promise((res, rej) => {
this.once(MongoInstanceEvents.instanceReady, res);
this.once(MongoInstanceEvents.instanceError, rej);
this.once(MongoInstanceEvents.instanceClosed, function launchInstanceClosed() {
rej(new Error('Instance Exited before being ready and without throwing an error!'));
});
// extra conditions just to be sure that the custom defined timeout is valid
const timeoutTime = !!this.instanceOpts.launchTimeout && this.instanceOpts.launchTimeout >= 1000
? this.instanceOpts.launchTimeout
: 1000 * 10; // default 10 seconds
timeout = setTimeout(() => {
const err = new errors_1.GenericMMSError(`Instance failed to start within ${timeoutTime}ms`);
this.emit(MongoInstanceEvents.instanceError, err);
rej(err);
}, timeoutTime);
}).finally(() => {
// always clear the timeout after the promise somehow resolves
clearTimeout(timeout);
async start() {
this.debug('start');
if (!(0, utils_1.isNullOrUndefined)(this.mongodProcess?.pid)) {
throw new errors_1.GenericMMSError(`Cannot run "MongoInstance.start" because "mongodProcess.pid" is still defined (pid: ${this.mongodProcess?.pid})`);
}
this.isInstancePrimary = false;
this.isInstanceReady = false;
this.isReplSet = false;
let timeout;
const mongoBin = await MongoBinary_1.MongoBinary.getPath(this.binaryOpts);
await (0, utils_1.checkBinaryPermissions)(mongoBin);
const launch = new Promise((res, rej) => {
this.once(MongoInstanceEvents.instanceReady, res);
this.once(MongoInstanceEvents.instanceError, rej);
this.once(MongoInstanceEvents.instanceClosed, function launchInstanceClosed() {
rej(new Error('Instance Exited before being ready and without throwing an error!'));
});
this.debug('start: Starting Processes');
this.mongodProcess = this._launchMongod(mongoBin);
// This assertion is here because somewhere between nodejs 12 and 16 the types for "childprocess.pid" changed to include "| undefined"
// it is tested and an error is thrown in "this._launchMongod", but typescript somehow does not see this yet as of 4.3.5
(0, utils_1.assertion)(!(0, utils_1.isNullOrUndefined)(this.mongodProcess.pid), new Error('MongoD Process failed to spawn'));
this.killerProcess = this._launchKiller(process.pid, this.mongodProcess.pid);
yield launch;
this.emit(MongoInstanceEvents.instanceStarted);
this.debug('start: Processes Started');
// extra conditions just to be sure that the custom defined timeout is valid
const timeoutTime = !!this.instanceOpts.launchTimeout && this.instanceOpts.launchTimeout >= 1000
? this.instanceOpts.launchTimeout
: 1000 * 10; // default 10 seconds
timeout = setTimeout(() => {
const err = new errors_1.GenericMMSError(`Instance failed to start within ${timeoutTime}ms`);
this.emit(MongoInstanceEvents.instanceError, err);
rej(err);
}, timeoutTime);
}).finally(() => {
// always clear the timeout after the promise somehow resolves
clearTimeout(timeout);
});
this.debug('start: Starting Processes');
this.mongodProcess = this._launchMongod(mongoBin);
// This assertion is here because somewhere between nodejs 12 and 16 the types for "childprocess.pid" changed to include "| undefined"
// it is tested and an error is thrown in "this._launchMongod", but typescript somehow does not see this yet as of 4.3.5
(0, utils_1.assertion)(!(0, utils_1.isNullOrUndefined)(this.mongodProcess.pid), new Error('MongoD Process failed to spawn'));
this.killerProcess = this._launchKiller(process.pid, this.mongodProcess.pid);
await launch;
this.emit(MongoInstanceEvents.instanceStarted);
this.debug('start: Processes Started');
}
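start() above now refuses to run while a previous `mongodProcess` pid is still set, and it rejects with a `GenericMMSError` when the instance is not ready within `launchTimeout` (custom values below 1000 ms fall back to the 10 second default). A sketch of raising that timeout for slow CI machines, assuming `launchTimeout` is forwarded through the server's `instance` options:

```ts
import { MongoMemoryServer } from 'mongodb-memory-server-core';

// Sketch only: assumes "launchTimeout" is part of the public instance options.
// Values below 1000 ms are ignored by the guard in start() and replaced with 10_000.
export async function createSlowStartServer(): Promise<MongoMemoryServer> {
  return MongoMemoryServer.create({
    instance: { launchTimeout: 30_000 }, // allow mongod 30 seconds to emit "instanceReady"
  });
}
```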

@@ -181,59 +178,60 @@ /**

*/
stop() {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
this.debug('stop');
if (!this.mongodProcess && !this.killerProcess) {
this.debug('stop: nothing to shutdown, returning');
return false;
}
if (!(0, utils_1.isNullOrUndefined)(this.mongodProcess)) {
// try to run "shutdown" before running "killProcess" (gracefull "SIGINT")
// using this, otherwise on windows nodejs will handle "SIGINT" & "SIGTERM" & "SIGKILL" the same (instant exit)
if (this.isReplSet) {
let con;
try {
this.debug('stop: trying shutdownServer');
const port = this.instanceOpts.port;
const ip = this.instanceOpts.ip;
(0, utils_1.assertion)(!(0, utils_1.isNullOrUndefined)(port), new Error('Cannot shutdown replset gracefully, no "port" is provided'));
(0, utils_1.assertion)(!(0, utils_1.isNullOrUndefined)(ip), new Error('Cannot shutdown replset gracefully, no "ip" is provided'));
con = yield mongodb_1.MongoClient.connect((0, utils_1.uriTemplate)(ip, port, 'admin'), Object.assign(Object.assign({}, this.extraConnectionOptions), { directConnection: true }));
const admin = con.db('admin'); // just to ensure it is actually the "admin" database
// "timeoutSecs" is set to "1" otherwise it will take at least "10" seconds to stop (very long tests)
yield admin.command({ shutdown: 1, force: true, timeoutSecs: 1 });
this.debug('stop: after admin shutdown command');
async stop() {
this.debug('stop');
if (!this.mongodProcess && !this.killerProcess) {
this.debug('stop: nothing to shutdown, returning');
return false;
}
if (!(0, utils_1.isNullOrUndefined)(this.mongodProcess)) {
// try to run "shutdown" before running "killProcess" (graceful "SIGINT")
// using this, otherwise on windows nodejs will handle "SIGINT" & "SIGTERM" & "SIGKILL" the same (instant exit)
if (this.isReplSet) {
let con;
try {
this.debug('stop: trying shutdownServer');
const port = this.instanceOpts.port;
const ip = this.instanceOpts.ip;
(0, utils_1.assertion)(!(0, utils_1.isNullOrUndefined)(port), new Error('Cannot shutdown replset gracefully, no "port" is provided'));
(0, utils_1.assertion)(!(0, utils_1.isNullOrUndefined)(ip), new Error('Cannot shutdown replset gracefully, no "ip" is provided'));
con = await mongodb_1.MongoClient.connect((0, utils_1.uriTemplate)(ip, port, 'admin'), {
...this.extraConnectionOptions,
directConnection: true,
});
const admin = con.db('admin'); // just to ensure it is actually the "admin" database
// "timeoutSecs" is set to "1" otherwise it will take at least "10" seconds to stop (very long tests)
await admin.command({ shutdown: 1, force: true, timeoutSecs: 1 });
this.debug('stop: after admin shutdown command');
}
catch (err) {
// Quote from MongoDB Documentation (https://docs.mongodb.com/manual/reference/command/replSetStepDown/#client-connections):
// > Starting in MongoDB 4.2, replSetStepDown command no longer closes all client connections.
// > In MongoDB 4.0 and earlier, replSetStepDown command closes all client connections during the step down.
// so error "MongoNetworkError: connection 1 to 127.0.0.1:41485 closed" will get thrown below 4.2
if (!(err instanceof mongodb_1.MongoNetworkError &&
/^connection \d+ to [\d.]+:\d+ closed$/i.test(err.message))) {
console.warn(err);
}
catch (err) {
// Quote from MongoDB Documentation (https://docs.mongodb.com/manual/reference/command/replSetStepDown/#client-connections):
// > Starting in MongoDB 4.2, replSetStepDown command no longer closes all client connections.
// > In MongoDB 4.0 and earlier, replSetStepDown command closes all client connections during the step down.
// so error "MongoNetworkError: connection 1 to 127.0.0.1:41485 closed" will get thrown below 4.2
if (!(err instanceof mongodb_1.MongoNetworkError &&
/^connection \d+ to [\d.]+:\d+ closed$/i.test(err.message))) {
console.warn(err);
}
}
finally {
if (!(0, utils_1.isNullOrUndefined)(con)) {
// even if it errors out, somehow the connection stays open
await con.close();
}
finally {
if (!(0, utils_1.isNullOrUndefined)(con)) {
// even if it errors out, somehow the connection stays open
yield con.close();
}
}
}
yield (0, utils_1.killProcess)(this.mongodProcess, 'mongodProcess', this.instanceOpts.port);
this.mongodProcess = undefined; // reset reference to the childProcess for "mongod"
}
else {
this.debug('stop: mongodProcess: nothing to shutdown, skipping');
}
if (!(0, utils_1.isNullOrUndefined)(this.killerProcess)) {
yield (0, utils_1.killProcess)(this.killerProcess, 'killerProcess', this.instanceOpts.port);
this.killerProcess = undefined; // reset reference to the childProcess for "mongo_killer"
}
else {
this.debug('stop: killerProcess: nothing to shutdown, skipping');
}
this.debug('stop: Instance Finished Shutdown');
return true;
});
await (0, utils_1.killProcess)(this.mongodProcess, 'mongodProcess', this.instanceOpts.port);
this.mongodProcess = undefined; // reset reference to the childProcess for "mongod"
}
else {
this.debug('stop: mongodProcess: nothing to shutdown, skipping');
}
if (!(0, utils_1.isNullOrUndefined)(this.killerProcess)) {
await (0, utils_1.killProcess)(this.killerProcess, 'killerProcess', this.instanceOpts.port);
this.killerProcess = undefined; // reset reference to the childProcess for "mongo_killer"
}
else {
this.debug('stop: killerProcess: nothing to shutdown, skipping');
}
this.debug('stop: Instance Finished Shutdown');
return true;
}

@@ -246,7 +244,9 @@ /**

_launchMongod(mongoBin) {
var _a, _b;
this.debug('_launchMongod: Launching Mongod Process');
const childProcess = (0, child_process_1.spawn)(path.resolve(mongoBin), this.prepareCommandArgs(), Object.assign(Object.assign({}, this.spawnOpts), { stdio: 'pipe' }));
(_a = childProcess.stderr) === null || _a === void 0 ? void 0 : _a.on('data', this.stderrHandler.bind(this));
(_b = childProcess.stdout) === null || _b === void 0 ? void 0 : _b.on('data', this.stdoutHandler.bind(this));
const childProcess = (0, child_process_1.spawn)(path.resolve(mongoBin), this.prepareCommandArgs(), {
...this.spawnOpts,
stdio: 'pipe', // ensure that stdio is always a pipe, regardless of user input
});
childProcess.stderr?.on('data', this.stderrHandler.bind(this));
childProcess.stdout?.on('data', this.stdoutHandler.bind(this));
childProcess.on('close', this.closeHandler.bind(this));

@@ -257,2 +257,3 @@ childProcess.on('error', this.errorHandler.bind(this));

}
childProcess.unref();
this.emit(MongoInstanceEvents.instanceLaunched);

@@ -328,3 +329,2 @@ return childProcess;

stdoutHandler(message) {
var _a, _b;
const line = message.toString().trim(); // trimming to remove extra new lines and spaces around the message

@@ -340,3 +340,3 @@ this.debug(`stdoutHandler: ""${line}""`); // denoting the STDOUT string with double quotes, because the stdout might also use quotes

if (/transition to \w+ from \w+/i.test(line)) {
const state = (_b = (_a = /transition to (\w+) from \w+/i.exec(line)) === null || _a === void 0 ? void 0 : _a[1]) !== null && _b !== void 0 ? _b : 'UNKNOWN';
const state = /transition to (\w+) from \w+/i.exec(line)?.[1] ?? 'UNKNOWN';
this.emit(MongoInstanceEvents.instanceReplState, state);

@@ -358,3 +358,2 @@ if (state !== 'PRIMARY') {

checkErrorInLine(line) {
var _a, _b, _c, _d, _e;
if (/address already in use/i.test(line)) {

@@ -367,6 +366,4 @@ this.emit(MongoInstanceEvents.instanceError, new errors_1.StdoutInstanceError(`Port "${this.instanceOpts.port}" already in use`));

// in pre-4.0 mongodb this exception may have been "permission denied" and "Data directory /path not found"
this.emit(MongoInstanceEvents.instanceError, new errors_1.StdoutInstanceError(`Instance Failed to start with "${(_a = execptionMatch[1]) !== null && _a !== void 0 ? _a : 'unknown'}". Original Error:\n` +
line
.substring(execptionMatch.index + execptionMatch[0].length)
.replace(/, terminating$/gi, '')));
this.emit(MongoInstanceEvents.instanceError, new errors_1.StdoutInstanceError(`Instance Failed to start with "${execptionMatch[1] ?? 'unknown'}". Original Error:\n` +
line.substring(execptionMatch.index + execptionMatch[0].length)));
}

@@ -376,5 +373,5 @@ // special handling for when mongodb outputs this error as json

if (execptionMatchJson) {
const loadedJSON = (_b = JSON.parse(line)) !== null && _b !== void 0 ? _b : {};
this.emit(MongoInstanceEvents.instanceError, new errors_1.StdoutInstanceError((_d = `Instance Failed to start with "DBException in initAndListen". Original Error:\n` +
((_c = loadedJSON === null || loadedJSON === void 0 ? void 0 : loadedJSON.attr) === null || _c === void 0 ? void 0 : _c.error)) !== null && _d !== void 0 ? _d : line // try to use the parsed json, but as fallback use the entire line
const loadedJSON = JSON.parse(line) ?? {};
this.emit(MongoInstanceEvents.instanceError, new errors_1.StdoutInstanceError(`Instance Failed to start with "DBException in initAndListen". Original Error:\n` +
loadedJSON?.attr?.error ?? line // try to use the parsed json, but as fallback use the entire line
));

@@ -398,3 +395,3 @@ }

if (!(0, utils_1.isNullOrUndefined)(liberrormatch)) {
const lib = (_e = liberrormatch[1].toLocaleLowerCase()) !== null && _e !== void 0 ? _e : 'unknown';
const lib = liberrormatch[1].toLocaleLowerCase() ?? 'unknown';
this.emit(MongoInstanceEvents.instanceError, new errors_1.StdoutInstanceError(`Instance failed to start because a library is missing or cannot be opened: "${lib}"`));

@@ -404,3 +401,5 @@ }

if (/\*\*\*aborting after/i.test(line)) {
this.emit(MongoInstanceEvents.instanceError, new errors_1.StdoutInstanceError('Mongod internal error'));
const match = line.match(/\*\*\*aborting after ([^\n]+)/i);
const extra = match?.[1] ? ` (${match[1]})` : '';
this.emit(MongoInstanceEvents.instanceError, new errors_1.StdoutInstanceError('Mongod internal error' + extra));
}

@@ -407,0 +406,0 @@ }

@@ -6,3 +6,2 @@ "use strict";

exports.postInstallEnsureBinary = void 0;
const tslib_1 = require("tslib");
const os_1 = require("os");

@@ -22,22 +21,20 @@ const path_1 = require("path");

}
function postInstallEnsureBinary(version, local) {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
console.log('Mongodb-Memory-Server* checking MongoDB binaries');
if (!local) {
// set "DOWNLOAD_DIR" to ~/.cache
(0, resolveConfig_1.setDefaultValue)(resolveConfig_1.ResolveConfigVariables.DOWNLOAD_DIR, (0, path_1.resolve)((0, os_1.homedir)(), '.cache', 'mongodb-binaries'));
}
if (version) {
// if "version" is defined, apply it
(0, resolveConfig_1.setDefaultValue)(resolveConfig_1.ResolveConfigVariables.VERSION, version);
}
process.env[(0, resolveConfig_1.envName)(resolveConfig_1.ResolveConfigVariables.RUNTIME_DOWNLOAD)] = 'true'; // To make sure to actually download in a postinstall
const binPath = yield MongoBinary_1.MongoBinary.getPath().catch((err) => {
console.warn('Mongodb-Memory-Server* failed to find an binary:\n', err.message);
process.exit(0); // Exiting with "0" to not fail the install (because it is a problem that can be solved otherwise)
});
console.log(`Mongodb-Memory-Server* found binary: "${binPath}"`);
async function postInstallEnsureBinary(version, local) {
console.log('Mongodb-Memory-Server* checking MongoDB binaries');
if (!local) {
// set "DOWNLOAD_DIR" to ~/.cache
(0, resolveConfig_1.setDefaultValue)(resolveConfig_1.ResolveConfigVariables.DOWNLOAD_DIR, (0, path_1.resolve)((0, os_1.homedir)(), '.cache', 'mongodb-binaries'));
}
if (version) {
// if "version" is defined, apply it
(0, resolveConfig_1.setDefaultValue)(resolveConfig_1.ResolveConfigVariables.VERSION, version);
}
process.env[(0, resolveConfig_1.envName)(resolveConfig_1.ResolveConfigVariables.RUNTIME_DOWNLOAD)] = 'true'; // To make sure to actually download in a postinstall
const binPath = await MongoBinary_1.MongoBinary.getPath().catch((err) => {
console.warn('Mongodb-Memory-Server* failed to find an binary:\n', err.message);
process.exit(0); // Exiting with "0" to not fail the install (because it is a problem that can be solved otherwise)
});
console.log(`Mongodb-Memory-Server* found binary: "${binPath}"`);
}
exports.postInstallEnsureBinary = postInstallEnsureBinary;
//# sourceMappingURL=postinstallHelper.js.map

@@ -5,6 +5,6 @@ "use strict";

const tslib_1 = require("tslib");
const camelcase_1 = (0, tslib_1.__importDefault)(require("camelcase"));
const camelcase_1 = tslib_1.__importDefault(require("camelcase"));
const new_find_package_json_1 = require("new-find-package-json");
const debug_1 = (0, tslib_1.__importDefault)(require("debug"));
const path = (0, tslib_1.__importStar)(require("path"));
const debug_1 = tslib_1.__importDefault(require("debug"));
const path = tslib_1.__importStar(require("path"));
const fs_1 = require("fs");

@@ -48,2 +48,3 @@ const utils_1 = require("./utils");

[ResolveConfigVariables.USE_ARCHIVE_NAME_FOR_BINARY_NAME, 'false'],
[ResolveConfigVariables.MD5_CHECK, 'true'],
[ResolveConfigVariables.MAX_REDIRECTS, '2'],

@@ -69,3 +70,2 @@ ]);

function findPackageJson(directory) {
var _a;
for (const filename of (0, new_find_package_json_1.findSync)(directory || process.cwd())) {

@@ -75,4 +75,4 @@ log(`findPackageJson: Found package.json at "${filename}"`);

/** Shorthand for the long path */
const config = (_a = readout === null || readout === void 0 ? void 0 : readout.config) === null || _a === void 0 ? void 0 : _a.mongodbMemoryServer;
if (!(0, utils_1.isNullOrUndefined)(config) && Object.keys(config !== null && config !== void 0 ? config : {}).length > 0) {
const config = readout?.config?.mongodbMemoryServer;
if (!(0, utils_1.isNullOrUndefined)(config) && Object.keys(config ?? {}).length > 0) {
log(`findPackageJson: Found package with non-empty config field at "${filename}"`);

@@ -122,4 +122,5 @@ const filepath = path.dirname(filename);

function resolveConfig(variableName) {
var _a, _b, _c;
return (_c = ((_b = (_a = process.env[envName(variableName)]) !== null && _a !== void 0 ? _a : packagejson === null || packagejson === void 0 ? void 0 : packagejson.config[(0, camelcase_1.default)(variableName)]) !== null && _b !== void 0 ? _b : exports.defaultValues.get(variableName))) === null || _c === void 0 ? void 0 : _c.toString();
return (process.env[envName(variableName)] ??
packagejson?.config[(0, camelcase_1.default)(variableName)] ??
exports.defaultValues.get(variableName))?.toString();
}
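resolveConfig above resolves each variable in a fixed order: the prefixed environment variable, then the camel-cased key under `config.mongodbMemoryServer` in the nearest package.json, then the built-in defaults (which now also include `MAX_REDIRECTS`). A sketch of the two override forms, assuming the `MONGOMS_` prefix and hypothetical values:

```ts
// Sketch of the resolution order implemented above (env > package.json config > defaults).
//
// package.json form (camel-cased keys, discovered via findPackageJson):
//   {
//     "config": {
//       "mongodbMemoryServer": { "version": "6.0.9", "downloadDir": "./.cache/mongodb-binaries" }
//     }
//   }
//
// Environment form, which wins over the package.json value (names are assumptions):
process.env.MONGOMS_VERSION = '7.0.2'; // hypothetical version string
process.env.MONGOMS_MAX_REDIRECTS = '5'; // overrides the new default of 2
```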

@@ -126,0 +127,0 @@ exports.resolveConfig = resolveConfig;

/// <reference types="node" />
/// <reference types="node" />
/// <reference types="node" />
import { ChildProcess } from 'child_process';

@@ -6,2 +8,3 @@ import { AutomaticAuth } from '../MongoMemoryServer';

import { LinuxOS } from './getos';
import { BinaryLike } from 'crypto';
/**

@@ -112,4 +115,2 @@ * This is here, because NodeJS does not have a FSError type

abstract start(): Promise<void>;
/** @deprecated replace argument with `Cleanup` interface object */
abstract stop(cleanup: boolean): Promise<boolean>;
abstract stop(cleanup: Cleanup): Promise<boolean>;

@@ -122,4 +123,2 @@ }

abstract getUri(otherDB?: string | boolean): string;
/** @deprecated replace argument with `Cleanup` interface object */
abstract cleanup(force: boolean): Promise<void>;
abstract cleanup(cleanup: Cleanup): Promise<void>;

@@ -151,3 +150,20 @@ }

export declare function removeDir(dirPath: string): Promise<void>;
/**
* Helper function to have uuidv4 generation and definition in one place
* @returns a uuid-v4
*/
export declare function uuidv4(): string;
/**
* Helper function to have md5 generation and definition in one place
* @param content the content to checksum
* @returns a md5 of the input
*/
export declare function md5(content: BinaryLike): string;
/**
* Helper function to have md5 generation and definition in one place for a file
* @param file the location of a file to read for a hash
* @returns a md5 of the input file
*/
export declare function md5FromFile(file: string): Promise<string>;
export {};
//# sourceMappingURL=utils.d.ts.map
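The declarations above add `uuidv4`, `md5`, and `md5FromFile` as shared helpers; `md5FromFile` is what `makeMD5check` now uses instead of the external `md5-file` package. A small usage sketch, assuming the compiled helpers can be deep-imported from `lib/util/utils`:

```ts
// Sketch only: the deep import path is an assumption about the published package layout.
import { uuidv4, md5, md5FromFile } from 'mongodb-memory-server-core/lib/util/utils';

export async function demoHashes(archivePath: string): Promise<void> {
  const id = uuidv4();                             // random v4 UUID, e.g. for a temporary db name
  const textHash = md5('some content');            // md5 hex digest of an in-memory value
  const fileHash = await md5FromFile(archivePath); // md5 of a file on disk
  console.log(id, textHash, fileHash);
}
```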
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.removeDir = exports.createTmpDir = exports.mkdir = exports.checkBinaryPermissions = exports.ManagerAdvanced = exports.ManagerBase = exports.tryReleaseFile = exports.pathExists = exports.statPath = exports.authDefault = exports.ensureAsync = exports.isAlive = exports.killProcess = exports.assertion = exports.isNullOrUndefined = exports.uriTemplate = exports.getHost = exports.generateDbName = exports.errorWithCode = void 0;
exports.md5FromFile = exports.md5 = exports.uuidv4 = exports.removeDir = exports.createTmpDir = exports.mkdir = exports.checkBinaryPermissions = exports.ManagerAdvanced = exports.ManagerBase = exports.tryReleaseFile = exports.pathExists = exports.statPath = exports.authDefault = exports.ensureAsync = exports.isAlive = exports.killProcess = exports.assertion = exports.isNullOrUndefined = exports.uriTemplate = exports.getHost = exports.generateDbName = exports.errorWithCode = void 0;
const tslib_1 = require("tslib");
const debug_1 = (0, tslib_1.__importDefault)(require("debug"));
const debug_1 = tslib_1.__importDefault(require("debug"));
const fs_1 = require("fs");
const errors_1 = require("./errors");
const os_1 = require("os");
const path = (0, tslib_1.__importStar)(require("path"));
const path = tslib_1.__importStar(require("path"));
const crypto_1 = require("crypto");
const log = (0, debug_1.default)('MongoMS:utils');

@@ -67,3 +68,3 @@ /**

if (!cond) {
throw error !== null && error !== void 0 ? error : new errors_1.AssertionFallbackError();
throw error ?? new errors_1.AssertionFallbackError();
}

@@ -78,42 +79,40 @@ }

*/
function killProcess(childprocess, name, mongodPort) {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
function ilog(msg) {
log(`Mongo[${mongodPort || 'unknown'}] killProcess: ${msg}`);
}
// this case can somehow happen, see https://github.com/nodkz/mongodb-memory-server/issues/666
if (isNullOrUndefined(childprocess)) {
ilog('childprocess was somehow undefined');
return;
}
// check if the childProcess (via PID) is still alive (found thanks to https://github.com/nodkz/mongodb-memory-server/issues/411)
if (!isAlive(childprocess.pid)) {
ilog("given childProcess's PID was not alive anymore");
return;
}
/**
* Timeout before using SIGKILL
*/
const timeoutTime = 1000 * 10;
yield new Promise((res, rej) => {
let timeout = setTimeout(() => {
ilog('timeout triggered, trying SIGKILL');
if (!debug_1.default.enabled('MongoMS:utils')) {
console.warn('An Process didnt exit with signal "SIGINT" within 10 seconds, using "SIGKILL"!\n' +
'Enable debug logs for more information');
}
childprocess.kill('SIGKILL');
timeout = setTimeout(() => {
ilog('timeout triggered again, rejecting');
rej(new Error(`Process "${name}" didnt exit, enable debug for more information.`));
}, timeoutTime);
async function killProcess(childprocess, name, mongodPort) {
function ilog(msg) {
log(`Mongo[${mongodPort || 'unknown'}] killProcess: ${msg}`);
}
// this case can somehow happen, see https://github.com/nodkz/mongodb-memory-server/issues/666
if (isNullOrUndefined(childprocess)) {
ilog('childprocess was somehow undefined');
return;
}
// check if the childProcess (via PID) is still alive (found thanks to https://github.com/nodkz/mongodb-memory-server/issues/411)
if (!isAlive(childprocess.pid)) {
ilog("given childProcess's PID was not alive anymore");
return;
}
/**
* Timeout before using SIGKILL
*/
const timeoutTime = 1000 * 10;
await new Promise((res, rej) => {
let timeout = setTimeout(() => {
ilog('timeout triggered, trying SIGKILL');
if (!debug_1.default.enabled('MongoMS:utils')) {
console.warn('An Process didnt exit with signal "SIGINT" within 10 seconds, using "SIGKILL"!\n' +
'Enable debug logs for more information');
}
childprocess.kill('SIGKILL');
timeout = setTimeout(() => {
ilog('timeout triggered again, rejecting');
rej(new Error(`Process "${name}" didnt exit, enable debug for more information.`));
}, timeoutTime);
childprocess.once(`exit`, (code, signal) => {
ilog(`${name}: got exit signal, Code: ${code}, Signal: ${signal}`);
clearTimeout(timeout);
res();
});
ilog(`${name}: sending "SIGINT"`);
childprocess.kill('SIGINT');
}, timeoutTime);
childprocess.once(`exit`, (code, signal) => {
ilog(`${name}: got exit signal, Code: ${code}, Signal: ${signal}`);
clearTimeout(timeout);
res();
});
ilog(`${name}: sending "SIGINT"`);
childprocess.kill('SIGINT');
});

@@ -144,6 +143,4 @@ }

*/
function ensureAsync() {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
return new Promise((res) => process.nextTick(res));
});
async function ensureAsync() {
return new Promise((res) => process.nextTick(res));
}

@@ -156,3 +153,11 @@ exports.ensureAsync = ensureAsync;

function authDefault(opts) {
return Object.assign({ force: false, disable: false, customRootName: 'mongodb-memory-server-root', customRootPwd: 'rootuser', extraUsers: [], keyfileContent: '0123456789' }, opts);
return {
force: false,
enable: true,
customRootName: 'mongodb-memory-server-root',
customRootPwd: 'rootuser',
extraUsers: [],
keyfileContent: '0123456789',
...opts,
};
}
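
The behavioural change in this hunk is that the auth defaults now carry an `enable: true` flag where the previous version used a `disable` flag. A minimal sketch of what the helper returns, assuming it is deep-imported from the compiled `lib/util/utils` module (that import path is an assumption, not a documented entry point):

// sketch only: deep-import of a compiled internal helper (path is an assumption)
const { authDefault } = require('mongodb-memory-server-core/lib/util/utils');

const auth = authDefault({ customRootName: 'admin' });
// -> { force: false, enable: true, customRootName: 'admin',
//      customRootPwd: 'rootuser', extraUsers: [], keyfileContent: '0123456789' }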

@@ -166,11 +171,9 @@ exports.authDefault = authDefault;

*/
function statPath(path) {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
return fs_1.promises.stat(path).catch((err) => {
// catch the error if the directory doesn't exist or permission is denied, without throwing an error
if (['ENOENT', 'EACCES'].includes(err.code)) {
return undefined;
}
throw err;
});
async function statPath(path) {
return fs_1.promises.stat(path).catch((err) => {
// catch the error if the directory doesn't exist or permission is denied, without throwing an error
if (['ENOENT', 'EACCES'].includes(err.code)) {
return undefined;
}
throw err;
});

@@ -185,6 +188,4 @@ }

*/
function pathExists(path) {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
return !isNullOrUndefined(yield statPath(path));
});
async function pathExists(path) {
return !isNullOrUndefined(await statPath(path));
}

@@ -197,16 +198,14 @@ exports.pathExists = pathExists;

*/
function tryReleaseFile(path, parser) {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
try {
const output = yield fs_1.promises.readFile(path);
return parser(output.toString());
async function tryReleaseFile(path, parser) {
try {
const output = await fs_1.promises.readFile(path);
return parser(output.toString());
}
catch (err) {
if (errorWithCode(err) && !['ENOENT', 'EACCES'].includes(err.code)) {
throw err;
}
catch (err) {
if (errorWithCode(err) && !['ENOENT', 'EACCES'].includes(err.code)) {
throw err;
}
log(`tryReleaseFile: "${path}" does not exist`);
return undefined;
}
});
log(`tryReleaseFile: "${path}" does not exist`);
return undefined;
}
}
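
tryReleaseFile keeps its behaviour of returning undefined for missing or unreadable files; only the async wrapper changed. A usage sketch, assuming tryReleaseFile is in scope as in this module; the path and parser are illustrative, not package defaults:

// sketch: parse an os-release style file, tolerating ENOENT / EACCES
(async () => {
  const release = await tryReleaseFile('/etc/os-release', (contents) =>
    Object.fromEntries(
      contents
        .split('\n')
        .filter((line) => line.includes('='))
        .map((line) => line.split('=', 2))
    )
  );
  // "release" is undefined when the file does not exist or cannot be read
})().catch(console.error);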

@@ -230,19 +229,17 @@ exports.tryReleaseFile = tryReleaseFile;

*/
function checkBinaryPermissions(path) {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
try {
yield fs_1.promises.access(path, fs_1.constants.X_OK); // check if the provided path exists and has the execute bit for current user
}
catch (err) {
if (errorWithCode(err)) {
if (err.code === 'EACCES') {
throw new errors_1.InsufficientPermissionsError(path);
}
if (err.code === 'ENOENT') {
throw new errors_1.BinaryNotFoundError(path);
}
async function checkBinaryPermissions(path) {
try {
await fs_1.promises.access(path, fs_1.constants.X_OK); // check if the provided path exists and has the execute bit for current user
}
catch (err) {
if (errorWithCode(err)) {
if (err.code === 'EACCES') {
throw new errors_1.InsufficientPermissionsError(path);
}
throw err;
if (err.code === 'ENOENT') {
throw new errors_1.BinaryNotFoundError(path);
}
}
});
throw err;
}
}
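
checkBinaryPermissions maps the two common failure modes onto dedicated error classes from ./errors. A sketch of how a caller could tell them apart; the wrapper function name and the binary path are hypothetical, and the error classes are assumed to be in scope as in this module:

// sketch: verify a mongod binary before spawning it (path is illustrative)
async function assertBinaryUsable() {
  try {
    await checkBinaryPermissions('/opt/mongodb/bin/mongod');
  } catch (err) {
    if (err instanceof errors_1.InsufficientPermissionsError) {
      // the file exists but lacks the execute bit for the current user
    } else if (err instanceof errors_1.BinaryNotFoundError) {
      // nothing exists at the given path
    } else {
      throw err; // any other fs error is rethrown unchanged
    }
  }
}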

@@ -255,6 +252,4 @@ exports.checkBinaryPermissions = checkBinaryPermissions;

*/
function mkdir(path) {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
yield fs_1.promises.mkdir(path, { recursive: true });
});
async function mkdir(path) {
await fs_1.promises.mkdir(path, { recursive: true });
}

@@ -268,7 +263,5 @@ exports.mkdir = mkdir;

*/
function createTmpDir(prefix, atPath) {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
const tmpPath = atPath !== null && atPath !== void 0 ? atPath : (0, os_1.tmpdir)();
return fs_1.promises.mkdtemp(path.join(tmpPath, prefix));
});
async function createTmpDir(prefix, atPath) {
const tmpPath = atPath ?? (0, os_1.tmpdir)();
return fs_1.promises.mkdtemp(path.join(tmpPath, prefix));
}
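
createTmpDir now falls back to the OS temp directory via `??`. A usage sketch; the wrapper name and prefix are hypothetical, and removeDir is the helper shown in the next hunk:

// sketch: create a scoped temp directory, use it, clean it up afterwards
async function withTmpDbPath() {
  const dbPath = await createTmpDir('mongo-mem-'); // e.g. "/tmp/mongo-mem-AbC123"
  try {
    // ... point a mongod dbPath at this directory ...
  } finally {
    await removeDir(dbPath); // silently returns if the path is already gone
  }
}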

@@ -281,25 +274,49 @@ exports.createTmpDir = createTmpDir;

*/
function removeDir(dirPath) {
return (0, tslib_1.__awaiter)(this, void 0, void 0, function* () {
const stat = yield statPath(dirPath);
if (isNullOrUndefined(stat)) {
return;
}
if (!stat.isDirectory()) {
throw new Error(`Given Path is not a directory! (Path: "${dirPath}")`);
}
if ('rm' in fs_1.promises) {
// only since NodeJS 14
yield fs_1.promises.rm(dirPath, { force: true, recursive: true });
}
else {
// before NodeJS 14
// needs the bridge via the interface, because we are using @types/node 14, where this if evaluates to a always "true" in typescript's eyes
yield fs_1.promises.rmdir(dirPath, {
recursive: true,
});
}
});
async function removeDir(dirPath) {
const stat = await statPath(dirPath);
if (isNullOrUndefined(stat)) {
return;
}
if (!stat.isDirectory()) {
throw new Error(`Given Path is not a directory! (Path: "${dirPath}")`);
}
if ('rm' in fs_1.promises) {
// only since NodeJS 14
await fs_1.promises.rm(dirPath, { force: true, recursive: true });
}
else {
// before NodeJS 14
// needs the bridge via the interface, because we are using @types/node 14, where this if evaluates to a always "true" in typescript's eyes
await fs_1.promises.rmdir(dirPath, {
recursive: true,
});
}
}
exports.removeDir = removeDir;
/**
* Helper function to have uuidv4 generation and definition in one place
* @returns a uuid-v4
*/
function uuidv4() {
return (0, crypto_1.randomUUID)();
}
exports.uuidv4 = uuidv4;
/**
* Helper function to have md5 generation and definition in one place
* @param content the content to checksum
* @returns a md5 of the input
*/
function md5(content) {
return (0, crypto_1.createHash)('md5').update(content).digest('hex');
}
exports.md5 = md5;
/**
* Helper function to have md5 generation and definition in one place for a file
* @param file the location of a file to read for a hash
* @returns a md5 of the input file
*/
async function md5FromFile(file) {
return md5(await fs_1.promises.readFile(file));
}
exports.md5FromFile = md5FromFile;
//# sourceMappingURL=utils.js.map
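
The new uuidv4, md5 and md5FromFile helpers are built directly on node's crypto and fs modules, which lines up with the md5-file dependency disappearing from package.json below (that link is an inference from this diff, not a stated changelog entry). A standalone sketch of the same operations; the checksumFile name is illustrative:

// sketch: equivalents of the new helpers, using only built-in modules
const { createHash, randomUUID } = require('crypto');
const { promises: fsPromises } = require('fs');

const id = randomUUID();                                          // what uuidv4() returns
const digest = createHash('md5').update('content').digest('hex'); // what md5('content') returns

async function checksumFile(file) {
  // equivalent of md5FromFile(file)
  return createHash('md5').update(await fsPromises.readFile(file)).digest('hex');
}

Note that randomUUID requires Node 14.17 or newer, which is consistent with the raised engines range in the package.json changes below.
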
{
"name": "mongodb-memory-server-core",
"version": "8.15.0",
"version": "9.0.0-beta.1",
"description": "MongoDB Server for testing (core package, without autodownload). The server will allow you to connect your favourite ODM or client library to the MongoDB Server and run parallel integration tests isolated from each other.",

@@ -13,3 +13,3 @@ "main": "lib/index",

"engines": {
"node": ">=12.22.0"
"node": ">=14.20.1"
},

@@ -40,23 +40,19 @@ "files": [

"@types/tar-stream": "^2.2.2",
"@types/uuid": "^9.0.2",
"@types/yauzl": "^2.10.0",
"@types/yazl": "^2.4.2",
"rimraf": "^3.0.2",
"rimraf": "^5.0.1",
"yazl": "^2.5.1"
},
"dependencies": {
"async-mutex": "^0.3.2",
"async-mutex": "^0.4.0",
"camelcase": "^6.3.0",
"debug": "^4.3.4",
"find-cache-dir": "^3.3.2",
"follow-redirects": "^1.15.2",
"get-port": "^5.1.1",
"https-proxy-agent": "^5.0.1",
"md5-file": "^5.0.0",
"mongodb": "^4.16.0",
"https-proxy-agent": "^7.0.1",
"mongodb": "^5.6.0",
"new-find-package-json": "^2.0.0",
"semver": "^7.5.4",
"tar-stream": "^2.1.4",
"tar-stream": "^3.0.0",
"tslib": "^2.6.1",
"uuid": "^9.0.0",
"follow-redirects": "^1.15.2",
"yauzl": "^2.10.0"

@@ -63,0 +59,0 @@ },

