You're Invited:Meet the Socket Team at RSAC and BSidesSF 2026, March 23–26.RSVP
Socket
Book a DemoSign in
Socket

@mikro-orm/mongodb

Package Overview
Dependencies
Maintainers
1
Versions
4189
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@mikro-orm/mongodb - npm Package Compare versions

Comparing version
7.0.3-dev.21
to
7.0.3
+5
-1
index.d.ts

@@ -11,2 +11,6 @@ export * from '@mikro-orm/core';

export { MongoEntityRepository as EntityRepository } from './MongoEntityRepository.js';
export { MongoMikroORM as MikroORM, type MongoOptions as Options, defineMongoConfig as defineConfig, } from './MongoMikroORM.js';
export {
MongoMikroORM as MikroORM,
type MongoOptions as Options,
defineMongoConfig as defineConfig,
} from './MongoMikroORM.js';
+1
-1

@@ -11,2 +11,2 @@ export * from '@mikro-orm/core';

export { MongoEntityRepository as EntityRepository } from './MongoEntityRepository.js';
export { MongoMikroORM as MikroORM, defineMongoConfig as defineConfig, } from './MongoMikroORM.js';
export { MongoMikroORM as MikroORM, defineMongoConfig as defineConfig } from './MongoMikroORM.js';

@@ -1,78 +0,159 @@

import { type ClientSession, type Collection, type Db, MongoClient, type MongoClientOptions, type TransactionOptions } from 'mongodb';
import { type AnyEntity, type CollationOptions, type Configuration, Connection, type ConnectionOptions, type ConnectionType, type Dictionary, type EntityData, type EntityName, type FilterQuery, type IsolationLevel, type LoggingOptions, type QueryOrderMap, type QueryResult, type Transaction, type TransactionEventBroadcaster, type UpsertManyOptions, type UpsertOptions } from '@mikro-orm/core';
import {
type ClientSession,
type Collection,
type Db,
MongoClient,
type MongoClientOptions,
type TransactionOptions,
} from 'mongodb';
import {
type AnyEntity,
type CollationOptions,
type Configuration,
Connection,
type ConnectionOptions,
type ConnectionType,
type Dictionary,
type EntityData,
type EntityName,
type FilterQuery,
type IsolationLevel,
type LoggingOptions,
type QueryOrderMap,
type QueryResult,
type Transaction,
type TransactionEventBroadcaster,
type UpsertManyOptions,
type UpsertOptions,
} from '@mikro-orm/core';
/** MongoDB database connection using the official `mongodb` driver. */
export declare class MongoConnection extends Connection {
#private;
constructor(config: Configuration, options?: ConnectionOptions, type?: ConnectionType);
connect(options?: {
skipOnConnect?: boolean;
}): Promise<void>;
createClient(): void;
close(force?: boolean): Promise<void>;
isConnected(): Promise<boolean>;
checkConnection(): Promise<{
#private;
constructor(config: Configuration, options?: ConnectionOptions, type?: ConnectionType);
connect(options?: { skipOnConnect?: boolean }): Promise<void>;
createClient(): void;
close(force?: boolean): Promise<void>;
isConnected(): Promise<boolean>;
checkConnection(): Promise<
| {
ok: true;
} | {
}
| {
ok: false;
reason: string;
error?: Error;
}>;
getClient(): MongoClient;
getCollection<T extends object>(name: EntityName<T> | string): Collection<T>;
createCollection<T extends object>(name: EntityName<T>): Promise<Collection<T>>;
listCollections(): Promise<string[]>;
dropCollection(name: EntityName<AnyEntity>): Promise<boolean>;
mapOptions(overrides: MongoClientOptions): MongoClientOptions;
getDb(): Db;
execute(query: string): Promise<any>;
find<T extends object>(entityName: EntityName<T>, where: FilterQuery<T>, opts?: MongoFindOptions<T>): Promise<EntityData<T>[]>;
stream<T extends object>(entityName: EntityName<T>, where: FilterQuery<T>, opts?: MongoFindOptions<T>): AsyncIterableIterator<T>;
private _find;
insertOne<T extends object>(entityName: EntityName<T>, data: Partial<T>, ctx?: Transaction<ClientSession>): Promise<QueryResult<T>>;
insertMany<T extends object>(entityName: EntityName<T>, data: Partial<T>[], ctx?: Transaction<ClientSession>): Promise<QueryResult<T>>;
updateMany<T extends object>(entityName: EntityName<T>, where: FilterQuery<T>, data: Partial<T>, ctx?: Transaction<ClientSession>, upsert?: boolean, upsertOptions?: UpsertOptions<T>): Promise<QueryResult<T>>;
bulkUpdateMany<T extends object>(entityName: EntityName<T>, where: FilterQuery<T>[], data: Partial<T>[], ctx?: Transaction<ClientSession>, upsert?: boolean, upsertOptions?: UpsertManyOptions<T>): Promise<QueryResult<T>>;
deleteMany<T extends object>(entityName: EntityName<T>, where: FilterQuery<T>, ctx?: Transaction<ClientSession>): Promise<QueryResult<T>>;
aggregate<T extends object = any>(entityName: EntityName<T>, pipeline: any[], ctx?: Transaction<ClientSession>, loggerContext?: LoggingOptions): Promise<T[]>;
streamAggregate<T extends object>(entityName: EntityName<T>, pipeline: any[], ctx?: Transaction<ClientSession>, loggerContext?: LoggingOptions, stream?: boolean): AsyncIterableIterator<T>;
countDocuments<T extends object>(entityName: EntityName<T>, where: FilterQuery<T>, opts?: MongoCountOptions): Promise<number>;
transactional<T>(cb: (trx: Transaction<ClientSession>) => Promise<T>, options?: {
isolationLevel?: IsolationLevel;
ctx?: Transaction<ClientSession>;
eventBroadcaster?: TransactionEventBroadcaster;
} & TransactionOptions): Promise<T>;
begin(options?: {
isolationLevel?: IsolationLevel;
ctx?: ClientSession;
eventBroadcaster?: TransactionEventBroadcaster;
} & TransactionOptions): Promise<ClientSession>;
commit(ctx: ClientSession, eventBroadcaster?: TransactionEventBroadcaster): Promise<void>;
rollback(ctx: ClientSession, eventBroadcaster?: TransactionEventBroadcaster): Promise<void>;
private runQuery;
private rethrow;
private createUpdatePayload;
private transformResult;
private getCollectionName;
private logObject;
}
>;
getClient(): MongoClient;
getCollection<T extends object>(name: EntityName<T> | string): Collection<T>;
createCollection<T extends object>(name: EntityName<T>): Promise<Collection<T>>;
listCollections(): Promise<string[]>;
dropCollection(name: EntityName<AnyEntity>): Promise<boolean>;
mapOptions(overrides: MongoClientOptions): MongoClientOptions;
getDb(): Db;
execute(query: string): Promise<any>;
find<T extends object>(
entityName: EntityName<T>,
where: FilterQuery<T>,
opts?: MongoFindOptions<T>,
): Promise<EntityData<T>[]>;
stream<T extends object>(
entityName: EntityName<T>,
where: FilterQuery<T>,
opts?: MongoFindOptions<T>,
): AsyncIterableIterator<T>;
private _find;
insertOne<T extends object>(
entityName: EntityName<T>,
data: Partial<T>,
ctx?: Transaction<ClientSession>,
): Promise<QueryResult<T>>;
insertMany<T extends object>(
entityName: EntityName<T>,
data: Partial<T>[],
ctx?: Transaction<ClientSession>,
): Promise<QueryResult<T>>;
updateMany<T extends object>(
entityName: EntityName<T>,
where: FilterQuery<T>,
data: Partial<T>,
ctx?: Transaction<ClientSession>,
upsert?: boolean,
upsertOptions?: UpsertOptions<T>,
): Promise<QueryResult<T>>;
bulkUpdateMany<T extends object>(
entityName: EntityName<T>,
where: FilterQuery<T>[],
data: Partial<T>[],
ctx?: Transaction<ClientSession>,
upsert?: boolean,
upsertOptions?: UpsertManyOptions<T>,
): Promise<QueryResult<T>>;
deleteMany<T extends object>(
entityName: EntityName<T>,
where: FilterQuery<T>,
ctx?: Transaction<ClientSession>,
): Promise<QueryResult<T>>;
aggregate<T extends object = any>(
entityName: EntityName<T>,
pipeline: any[],
ctx?: Transaction<ClientSession>,
loggerContext?: LoggingOptions,
): Promise<T[]>;
streamAggregate<T extends object>(
entityName: EntityName<T>,
pipeline: any[],
ctx?: Transaction<ClientSession>,
loggerContext?: LoggingOptions,
stream?: boolean,
): AsyncIterableIterator<T>;
countDocuments<T extends object>(
entityName: EntityName<T>,
where: FilterQuery<T>,
opts?: MongoCountOptions,
): Promise<number>;
transactional<T>(
cb: (trx: Transaction<ClientSession>) => Promise<T>,
options?: {
isolationLevel?: IsolationLevel;
ctx?: Transaction<ClientSession>;
eventBroadcaster?: TransactionEventBroadcaster;
} & TransactionOptions,
): Promise<T>;
begin(
options?: {
isolationLevel?: IsolationLevel;
ctx?: ClientSession;
eventBroadcaster?: TransactionEventBroadcaster;
} & TransactionOptions,
): Promise<ClientSession>;
commit(ctx: ClientSession, eventBroadcaster?: TransactionEventBroadcaster): Promise<void>;
rollback(ctx: ClientSession, eventBroadcaster?: TransactionEventBroadcaster): Promise<void>;
private runQuery;
private rethrow;
private createUpdatePayload;
private transformResult;
private getCollectionName;
private logObject;
}
/** Options shared across MongoDB query operations. */
export interface MongoQueryOptions {
collation?: CollationOptions;
indexHint?: string | Dictionary;
maxTimeMS?: number;
allowDiskUse?: boolean;
collation?: CollationOptions;
indexHint?: string | Dictionary;
maxTimeMS?: number;
allowDiskUse?: boolean;
}
/** Options for MongoDB find operations. */
export interface MongoFindOptions<T extends object> extends MongoQueryOptions {
orderBy?: QueryOrderMap<T> | QueryOrderMap<T>[];
limit?: number;
offset?: number;
fields?: string[];
ctx?: Transaction<ClientSession>;
loggerContext?: LoggingOptions;
orderBy?: QueryOrderMap<T> | QueryOrderMap<T>[];
limit?: number;
offset?: number;
fields?: string[];
ctx?: Transaction<ClientSession>;
loggerContext?: LoggingOptions;
}
/** Options for MongoDB count operations. */
export interface MongoCountOptions extends Omit<MongoQueryOptions, 'allowDiskUse'> {
ctx?: Transaction<ClientSession>;
loggerContext?: LoggingOptions;
ctx?: Transaction<ClientSession>;
loggerContext?: LoggingOptions;
}

@@ -1,428 +0,433 @@

import { MongoClient, ObjectId, } from 'mongodb';
import { Connection, EventType, inspect, QueryOrder, Utils, ValidationError, } from '@mikro-orm/core';
import { MongoClient, ObjectId } from 'mongodb';
import { Connection, EventType, inspect, QueryOrder, Utils, ValidationError } from '@mikro-orm/core';
/** MongoDB database connection using the official `mongodb` driver. */
export class MongoConnection extends Connection {
#client;
#db;
constructor(config, options, type = 'write') {
super(config, options, type);
// @ts-ignore
ObjectId.prototype[Symbol.for('nodejs.util.inspect.custom')] = function () {
return `ObjectId('${this.toHexString()}')`;
};
// @ts-ignore
Date.prototype[Symbol.for('nodejs.util.inspect.custom')] = function () {
return `ISODate('${this.toISOString()}')`;
};
#client;
#db;
constructor(config, options, type = 'write') {
super(config, options, type);
// @ts-ignore
ObjectId.prototype[Symbol.for('nodejs.util.inspect.custom')] = function () {
return `ObjectId('${this.toHexString()}')`;
};
// @ts-ignore
Date.prototype[Symbol.for('nodejs.util.inspect.custom')] = function () {
return `ISODate('${this.toISOString()}')`;
};
}
async connect(options) {
this.getClient();
this.connected = true;
if (options?.skipOnConnect !== true) {
await this.onConnect();
}
async connect(options) {
this.getClient();
this.connected = true;
if (options?.skipOnConnect !== true) {
await this.onConnect();
}
}
createClient() {
let driverOptions = this.options.driverOptions ?? this.config.get('driverOptions');
if (typeof driverOptions === 'function') {
driverOptions = driverOptions();
}
createClient() {
let driverOptions = this.options.driverOptions ?? this.config.get('driverOptions');
if (typeof driverOptions === 'function') {
driverOptions = driverOptions();
}
if (driverOptions instanceof MongoClient) {
this.logger.log('info', 'Reusing MongoClient provided via `driverOptions`');
this.#client = driverOptions;
}
else {
this.#client = new MongoClient(this.config.get('clientUrl'), this.mapOptions(driverOptions));
this.#client.appendMetadata({
name: 'MikroORM',
version: Utils.getORMVersion(),
});
const onCreateConnection = this.options.onCreateConnection ?? this.config.get('onCreateConnection');
/* v8 ignore next */
this.#client.on('connectionCreated', () => {
void onCreateConnection?.(this.#client);
});
}
this.#db = this.#client.db(this.config.get('dbName'));
if (driverOptions instanceof MongoClient) {
this.logger.log('info', 'Reusing MongoClient provided via `driverOptions`');
this.#client = driverOptions;
} else {
this.#client = new MongoClient(this.config.get('clientUrl'), this.mapOptions(driverOptions));
this.#client.appendMetadata({
name: 'MikroORM',
version: Utils.getORMVersion(),
});
const onCreateConnection = this.options.onCreateConnection ?? this.config.get('onCreateConnection');
/* v8 ignore next */
this.#client.on('connectionCreated', () => {
void onCreateConnection?.(this.#client);
});
}
async close(force) {
await this.#client?.close(force);
this.connected = false;
this.#client = undefined;
this.#db = this.#client.db(this.config.get('dbName'));
}
async close(force) {
await this.#client?.close(force);
this.connected = false;
this.#client = undefined;
}
async isConnected() {
try {
const res = await this.#db?.command({ ping: 1 });
return (this.connected = !!res?.ok);
} catch (error) {
return (this.connected = false);
}
async isConnected() {
try {
const res = await this.#db?.command({ ping: 1 });
return (this.connected = !!res?.ok);
}
catch (error) {
return (this.connected = false);
}
}
async checkConnection() {
try {
const res = await this.#db?.command({ ping: 1 });
return res?.ok
? { ok: true }
: { ok: false, reason: 'Ping reply does not feature "ok" property, or it evaluates to "false"' };
} catch (error) {
return { ok: false, reason: error.message, error };
}
async checkConnection() {
try {
const res = await this.#db?.command({ ping: 1 });
return res?.ok
? { ok: true }
: { ok: false, reason: 'Ping reply does not feature "ok" property, or it evaluates to "false"' };
}
catch (error) {
return { ok: false, reason: error.message, error };
}
}
getClient() {
if (!this.#client) {
this.createClient();
}
getClient() {
if (!this.#client) {
this.createClient();
}
return this.#client;
return this.#client;
}
getCollection(name) {
return this.getDb().collection(this.getCollectionName(name));
}
async createCollection(name) {
return this.getDb().createCollection(this.getCollectionName(name));
}
async listCollections() {
const collections = await this.getDb().listCollections({}, { nameOnly: true }).toArray();
return collections.map(c => c.name);
}
async dropCollection(name) {
return this.getDb().dropCollection(this.getCollectionName(name));
}
mapOptions(overrides) {
const ret = {};
const pool = this.config.get('pool');
const username = this.config.get('user');
const password = this.config.get('password');
if (this.config.get('host')) {
throw new ValidationError('Mongo driver does not support `host` options, use `clientUrl` instead!');
}
getCollection(name) {
return this.getDb().collection(this.getCollectionName(name));
if (username && password) {
ret.auth = { username, password };
}
async createCollection(name) {
return this.getDb().createCollection(this.getCollectionName(name));
ret.minPoolSize = pool.min;
ret.maxPoolSize = pool.max;
ret.waitQueueTimeoutMS = pool.idleTimeoutMillis;
return Utils.mergeConfig(ret, overrides);
}
getDb() {
this.#db ??= this.getClient().db(this.config.get('dbName'));
return this.#db;
}
async execute(query) {
throw new Error(`${this.constructor.name} does not support generic execute method`);
}
async find(entityName, where, opts = {}) {
const { cursor, query } = await this._find(entityName, where, opts);
const now = Date.now();
const res = await cursor.toArray();
this.logQuery(`${query}.toArray();`, { took: Date.now() - now, results: res.length, ...opts.loggerContext });
return res;
}
async *stream(entityName, where, opts = {}) {
const { cursor, query } = await this._find(entityName, where, opts);
this.logQuery(`${query}.toArray();`, opts.loggerContext);
yield* cursor;
}
async _find(entityName, where, opts = {}) {
await this.ensureConnection();
const collection = this.getCollectionName(entityName);
const options = opts.ctx ? { session: opts.ctx } : {};
if (opts.fields) {
options.projection = opts.fields.reduce((o, k) => Object.assign(o, { [k]: 1 }), {});
}
async listCollections() {
const collections = await this.getDb().listCollections({}, { nameOnly: true }).toArray();
return collections.map(c => c.name);
if (opts.collation) {
options.collation = opts.collation;
}
async dropCollection(name) {
return this.getDb().dropCollection(this.getCollectionName(name));
if (opts.indexHint != null) {
options.hint = opts.indexHint;
}
mapOptions(overrides) {
const ret = {};
const pool = this.config.get('pool');
const username = this.config.get('user');
const password = this.config.get('password');
if (this.config.get('host')) {
throw new ValidationError('Mongo driver does not support `host` options, use `clientUrl` instead!');
}
if (username && password) {
ret.auth = { username, password };
}
ret.minPoolSize = pool.min;
ret.maxPoolSize = pool.max;
ret.waitQueueTimeoutMS = pool.idleTimeoutMillis;
return Utils.mergeConfig(ret, overrides);
if (opts.maxTimeMS != null) {
options.maxTimeMS = opts.maxTimeMS;
}
getDb() {
this.#db ??= this.getClient().db(this.config.get('dbName'));
return this.#db;
if (opts.allowDiskUse != null) {
options.allowDiskUse = opts.allowDiskUse;
}
async execute(query) {
throw new Error(`${this.constructor.name} does not support generic execute method`);
const resultSet = this.getCollection(entityName).find(where, options);
let query = `db.getCollection('${collection}').find(${this.logObject(where)}, ${this.logObject(options)})`;
const orderBy = Utils.asArray(opts.orderBy);
if (orderBy.length > 0) {
const orderByTuples = [];
orderBy.forEach(o => {
Utils.keys(o).forEach(k => {
const direction = o[k];
if (typeof direction === 'string') {
orderByTuples.push([k.toString(), direction.toUpperCase() === QueryOrder.ASC ? 1 : -1]);
} else {
orderByTuples.push([k.toString(), direction]);
}
});
});
if (orderByTuples.length > 0) {
query += `.sort(${this.logObject(orderByTuples)})`;
resultSet.sort(orderByTuples);
}
}
async find(entityName, where, opts = {}) {
const { cursor, query } = await this._find(entityName, where, opts);
const now = Date.now();
const res = await cursor.toArray();
this.logQuery(`${query}.toArray();`, { took: Date.now() - now, results: res.length, ...opts.loggerContext });
return res;
if (opts.limit !== undefined) {
query += `.limit(${opts.limit})`;
resultSet.limit(opts.limit);
}
async *stream(entityName, where, opts = {}) {
const { cursor, query } = await this._find(entityName, where, opts);
this.logQuery(`${query}.toArray();`, opts.loggerContext);
yield* cursor;
if (opts.offset !== undefined) {
query += `.skip(${opts.offset})`;
resultSet.skip(opts.offset);
}
async _find(entityName, where, opts = {}) {
await this.ensureConnection();
const collection = this.getCollectionName(entityName);
const options = opts.ctx ? { session: opts.ctx } : {};
if (opts.fields) {
options.projection = opts.fields.reduce((o, k) => Object.assign(o, { [k]: 1 }), {});
}
if (opts.collation) {
options.collation = opts.collation;
}
if (opts.indexHint != null) {
options.hint = opts.indexHint;
}
if (opts.maxTimeMS != null) {
options.maxTimeMS = opts.maxTimeMS;
}
if (opts.allowDiskUse != null) {
options.allowDiskUse = opts.allowDiskUse;
}
const resultSet = this.getCollection(entityName).find(where, options);
let query = `db.getCollection('${collection}').find(${this.logObject(where)}, ${this.logObject(options)})`;
const orderBy = Utils.asArray(opts.orderBy);
if (orderBy.length > 0) {
const orderByTuples = [];
orderBy.forEach(o => {
Utils.keys(o).forEach(k => {
const direction = o[k];
if (typeof direction === 'string') {
orderByTuples.push([k.toString(), direction.toUpperCase() === QueryOrder.ASC ? 1 : -1]);
}
else {
orderByTuples.push([k.toString(), direction]);
}
});
});
if (orderByTuples.length > 0) {
query += `.sort(${this.logObject(orderByTuples)})`;
resultSet.sort(orderByTuples);
}
}
if (opts.limit !== undefined) {
query += `.limit(${opts.limit})`;
resultSet.limit(opts.limit);
}
if (opts.offset !== undefined) {
query += `.skip(${opts.offset})`;
resultSet.skip(opts.offset);
}
return { cursor: resultSet, query };
return { cursor: resultSet, query };
}
async insertOne(entityName, data, ctx) {
return this.runQuery('insertOne', entityName, data, undefined, ctx);
}
async insertMany(entityName, data, ctx) {
return this.runQuery('insertMany', entityName, data, undefined, ctx);
}
async updateMany(entityName, where, data, ctx, upsert, upsertOptions) {
return this.runQuery('updateMany', entityName, data, where, ctx, { upsert, upsertOptions });
}
async bulkUpdateMany(entityName, where, data, ctx, upsert, upsertOptions) {
return this.runQuery('bulkUpdateMany', entityName, data, where, ctx, { upsert, upsertOptions });
}
async deleteMany(entityName, where, ctx) {
return this.runQuery('deleteMany', entityName, undefined, where, ctx);
}
async aggregate(entityName, pipeline, ctx, loggerContext) {
await this.ensureConnection();
const collection = this.getCollectionName(entityName);
/* v8 ignore next */
const options = ctx ? { session: ctx } : {};
const query = `db.getCollection('${collection}').aggregate(${this.logObject(pipeline)}, ${this.logObject(options)}).toArray();`;
const now = Date.now();
const res = await this.getCollection(entityName).aggregate(pipeline, options).toArray();
this.logQuery(query, { took: Date.now() - now, results: res.length, ...loggerContext });
return res;
}
async *streamAggregate(entityName, pipeline, ctx, loggerContext, stream = false) {
await this.ensureConnection();
const collection = this.getCollectionName(entityName);
/* v8 ignore next */
const options = ctx ? { session: ctx } : {};
const query = `db.getCollection('${collection}').aggregate(${this.logObject(pipeline)}, ${this.logObject(options)})};`;
const cursor = this.getCollection(entityName).aggregate(pipeline, options);
this.logQuery(query, { ...loggerContext });
yield* cursor;
}
async countDocuments(entityName, where, opts = {}) {
return this.runQuery('countDocuments', entityName, undefined, where, opts.ctx, {
loggerContext: opts.loggerContext,
collation: opts.collation,
indexHint: opts.indexHint,
maxTimeMS: opts.maxTimeMS,
});
}
async transactional(cb, options = {}) {
await this.ensureConnection();
const session = await this.begin(options);
try {
const ret = await cb(session);
await this.commit(session, options.eventBroadcaster);
return ret;
} catch (error) {
await this.rollback(session, options.eventBroadcaster);
throw error;
} finally {
await session.endSession();
}
async insertOne(entityName, data, ctx) {
return this.runQuery('insertOne', entityName, data, undefined, ctx);
}
async begin(options = {}) {
await this.ensureConnection();
const { ctx, isolationLevel, eventBroadcaster, ...txOptions } = options;
if (!ctx) {
await eventBroadcaster?.dispatchEvent(EventType.beforeTransactionStart);
}
async insertMany(entityName, data, ctx) {
return this.runQuery('insertMany', entityName, data, undefined, ctx);
const session = ctx || this.getClient().startSession();
session.startTransaction(txOptions);
this.logQuery('db.begin();');
await eventBroadcaster?.dispatchEvent(EventType.afterTransactionStart, session);
return session;
}
async commit(ctx, eventBroadcaster) {
await this.ensureConnection();
await eventBroadcaster?.dispatchEvent(EventType.beforeTransactionCommit, ctx);
await ctx.commitTransaction();
this.logQuery('db.commit();');
await eventBroadcaster?.dispatchEvent(EventType.afterTransactionCommit, ctx);
}
async rollback(ctx, eventBroadcaster) {
await this.ensureConnection();
await eventBroadcaster?.dispatchEvent(EventType.beforeTransactionRollback, ctx);
await ctx.abortTransaction();
this.logQuery('db.rollback();');
await eventBroadcaster?.dispatchEvent(EventType.afterTransactionRollback, ctx);
}
async runQuery(method, entityName, data, where, ctx, opts) {
await this.ensureConnection();
const { upsert, upsertOptions, loggerContext, collation, indexHint, maxTimeMS } = opts ?? {};
const collection = this.getCollectionName(entityName);
const logger = this.config.getLogger();
const options = ctx ? { session: ctx, upsert } : { upsert };
if (options.upsert === undefined) {
delete options.upsert;
}
async updateMany(entityName, where, data, ctx, upsert, upsertOptions) {
return this.runQuery('updateMany', entityName, data, where, ctx, { upsert, upsertOptions });
}
async bulkUpdateMany(entityName, where, data, ctx, upsert, upsertOptions) {
return this.runQuery('bulkUpdateMany', entityName, data, where, ctx, { upsert, upsertOptions });
}
async deleteMany(entityName, where, ctx) {
return this.runQuery('deleteMany', entityName, undefined, where, ctx);
}
async aggregate(entityName, pipeline, ctx, loggerContext) {
await this.ensureConnection();
const collection = this.getCollectionName(entityName);
/* v8 ignore next */
const options = ctx ? { session: ctx } : {};
const query = `db.getCollection('${collection}').aggregate(${this.logObject(pipeline)}, ${this.logObject(options)}).toArray();`;
const now = Date.now();
const res = await this.getCollection(entityName).aggregate(pipeline, options).toArray();
this.logQuery(query, { took: Date.now() - now, results: res.length, ...loggerContext });
return res;
}
async *streamAggregate(entityName, pipeline, ctx, loggerContext, stream = false) {
await this.ensureConnection();
const collection = this.getCollectionName(entityName);
/* v8 ignore next */
const options = ctx ? { session: ctx } : {};
const query = `db.getCollection('${collection}').aggregate(${this.logObject(pipeline)}, ${this.logObject(options)})};`;
const cursor = this.getCollection(entityName).aggregate(pipeline, options);
this.logQuery(query, { ...loggerContext });
yield* cursor;
}
async countDocuments(entityName, where, opts = {}) {
return this.runQuery('countDocuments', entityName, undefined, where, opts.ctx, {
loggerContext: opts.loggerContext,
collation: opts.collation,
indexHint: opts.indexHint,
maxTimeMS: opts.maxTimeMS,
const now = Date.now();
let res;
let query;
const log = msg => (logger.isEnabled('query') ? msg() : '');
switch (method) {
case 'insertOne':
Object.keys(data)
.filter(k => typeof data[k] === 'undefined')
.forEach(k => delete data[k]);
query = log(
() => `db.getCollection('${collection}').insertOne(${this.logObject(data)}, ${this.logObject(options)});`,
);
res = await this.rethrow(this.getCollection(entityName).insertOne(data, options), query);
break;
case 'insertMany':
data.forEach(data =>
Object.keys(data)
.filter(k => typeof data[k] === 'undefined')
.forEach(k => delete data[k]),
);
query = log(
() => `db.getCollection('${collection}').insertMany(${this.logObject(data)}, ${this.logObject(options)});`,
);
res = await this.rethrow(this.getCollection(entityName).insertMany(data, options), query);
break;
case 'updateMany': {
const payload = Object.keys(data).every(k => k.startsWith('$'))
? data
: this.createUpdatePayload(data, upsertOptions);
query = log(
() =>
`db.getCollection('${collection}').updateMany(${this.logObject(where)}, ${this.logObject(payload)}, ${this.logObject(options)});`,
);
res = await this.rethrow(this.getCollection(entityName).updateMany(where, payload, options), query);
break;
}
case 'bulkUpdateMany': {
query = log(
() => `bulk = db.getCollection('${collection}').initializeUnorderedBulkOp(${this.logObject(options)});\n`,
);
const bulk = this.getCollection(entityName).initializeUnorderedBulkOp(options);
data.forEach((row, idx) => {
const id = where[idx];
const cond = Utils.isPlainObject(id) ? id : { _id: id };
const doc = this.createUpdatePayload(row, upsertOptions);
if (upsert) {
if (Utils.isEmpty(cond)) {
query += log(() => `bulk.insert(${this.logObject(row)});\n`);
bulk.insert(row);
} else {
query += log(() => `bulk.find(${this.logObject(cond)}).upsert().update(${this.logObject(doc)});\n`);
bulk.find(cond).upsert().update(doc);
}
return;
}
query += log(() => `bulk.find(${this.logObject(cond)}).update(${this.logObject(doc)});\n`);
bulk.find(cond).update(doc);
});
}
async transactional(cb, options = {}) {
await this.ensureConnection();
const session = await this.begin(options);
try {
const ret = await cb(session);
await this.commit(session, options.eventBroadcaster);
return ret;
query += log(() => `bulk.execute()`);
res = await this.rethrow(bulk.execute(), query);
break;
}
case 'deleteMany':
case 'countDocuments':
if (method === 'countDocuments') {
if (collation) {
options.collation = collation;
}
if (indexHint != null) {
options.hint = indexHint;
}
if (maxTimeMS != null) {
options.maxTimeMS = maxTimeMS;
}
}
catch (error) {
await this.rollback(session, options.eventBroadcaster);
throw error;
}
finally {
await session.endSession();
}
query = log(
() => `db.getCollection('${collection}').${method}(${this.logObject(where)}, ${this.logObject(options)});`,
);
res = await this.rethrow(this.getCollection(entityName)[method](where, options), query);
break;
}
async begin(options = {}) {
await this.ensureConnection();
const { ctx, isolationLevel, eventBroadcaster, ...txOptions } = options;
if (!ctx) {
await eventBroadcaster?.dispatchEvent(EventType.beforeTransactionStart);
}
const session = ctx || this.getClient().startSession();
session.startTransaction(txOptions);
this.logQuery('db.begin();');
await eventBroadcaster?.dispatchEvent(EventType.afterTransactionStart, session);
return session;
this.logQuery(query, { took: Date.now() - now, ...loggerContext });
if (method === 'countDocuments') {
return res;
}
async commit(ctx, eventBroadcaster) {
await this.ensureConnection();
await eventBroadcaster?.dispatchEvent(EventType.beforeTransactionCommit, ctx);
await ctx.commitTransaction();
this.logQuery('db.commit();');
await eventBroadcaster?.dispatchEvent(EventType.afterTransactionCommit, ctx);
return this.transformResult(res);
}
rethrow(promise, query) {
return promise.catch(e => {
this.logQuery(query, { level: 'error' });
e.message += '\nQuery: ' + query;
throw e;
});
}
createUpdatePayload(row, upsertOptions) {
row = { ...row };
const doc = { $set: row };
Utils.keys(row).forEach(k => {
if (k.toString().startsWith('$')) {
doc[k] = row[k];
delete row[k];
}
});
const $unset = {};
const $inc = {};
for (const k of Utils.keys(row)) {
const item = row[k];
if (typeof item === 'undefined') {
$unset[k] = '';
delete row[k];
continue;
}
if (Utils.isPlainObject(item) && '$inc' in item) {
$inc[k] = item.$inc;
delete row[k];
}
}
async rollback(ctx, eventBroadcaster) {
await this.ensureConnection();
await eventBroadcaster?.dispatchEvent(EventType.beforeTransactionRollback, ctx);
await ctx.abortTransaction();
this.logQuery('db.rollback();');
await eventBroadcaster?.dispatchEvent(EventType.afterTransactionRollback, ctx);
}
    /**
     * Executes a single collection-level operation (`insertOne`, `insertMany`,
     * `updateMany`, `bulkUpdateMany`, `deleteMany` or `countDocuments`) against the
     * collection mapped to `entityName`, rendering a mongo-shell-style string of the
     * operation for the query logger and timing its execution.
     *
     * @param method one of the supported driver operations (switch below)
     * @param entityName entity (or collection) the operation targets
     * @param data payload document(s) for insert/update methods
     * @param where filter condition(s); for `bulkUpdateMany` an array parallel to `data`
     * @param ctx optional client session used as the `session` option
     * @param opts upsert flags, logger context and count options (collation/hint/maxTimeMS)
     * @returns the raw count for `countDocuments`, otherwise a normalized `QueryResult`
     */
    async runQuery(method, entityName, data, where, ctx, opts) {
        await this.ensureConnection();
        const { upsert, upsertOptions, loggerContext, collation, indexHint, maxTimeMS } = opts ?? {};
        const collection = this.getCollectionName(entityName);
        const logger = this.config.getLogger();
        const options = ctx ? { session: ctx, upsert } : { upsert };
        // drop the `upsert` key entirely when not requested, so it does not leak into the driver call
        if (options.upsert === undefined) {
            delete options.upsert;
        }
        const now = Date.now();
        let res;
        let query;
        // lazily render log strings only when query logging is enabled
        const log = (msg) => (logger.isEnabled('query') ? msg() : '');
        switch (method) {
            case 'insertOne':
                // strip `undefined` values so they are not persisted as fields
                Object.keys(data)
                    .filter(k => typeof data[k] === 'undefined')
                    .forEach(k => delete data[k]);
                query = log(() => `db.getCollection('${collection}').insertOne(${this.logObject(data)}, ${this.logObject(options)});`);
                res = await this.rethrow(this.getCollection(entityName).insertOne(data, options), query);
                break;
            case 'insertMany':
                // strip `undefined` values from every document
                data.forEach(data => Object.keys(data)
                    .filter(k => typeof data[k] === 'undefined')
                    .forEach(k => delete data[k]));
                query = log(() => `db.getCollection('${collection}').insertMany(${this.logObject(data)}, ${this.logObject(options)});`);
                res = await this.rethrow(this.getCollection(entityName).insertMany(data, options), query);
                break;
            case 'updateMany': {
                // if every key is already a mongo operator ($set, $inc, …) pass the data through as-is
                const payload = Object.keys(data).every(k => k.startsWith('$'))
                    ? data
                    : this.createUpdatePayload(data, upsertOptions);
                query = log(() => `db.getCollection('${collection}').updateMany(${this.logObject(where)}, ${this.logObject(payload)}, ${this.logObject(options)});`);
                res = (await this.rethrow(this.getCollection(entityName).updateMany(where, payload, options), query));
                break;
            }
            case 'bulkUpdateMany': {
                // `where` is an array of conditions parallel to `data`; build one unordered bulk op,
                // appending each sub-operation to the rendered query string as we go
                query = log(() => `bulk = db.getCollection('${collection}').initializeUnorderedBulkOp(${this.logObject(options)});\n`);
                const bulk = this.getCollection(entityName).initializeUnorderedBulkOp(options);
                data.forEach((row, idx) => {
                    const id = where[idx];
                    const cond = Utils.isPlainObject(id) ? id : { _id: id };
                    const doc = this.createUpdatePayload(row, upsertOptions);
                    if (upsert) {
                        // empty condition means there is nothing to match — insert instead of upsert
                        if (Utils.isEmpty(cond)) {
                            query += log(() => `bulk.insert(${this.logObject(row)});\n`);
                            bulk.insert(row);
                        }
                        else {
                            query += log(() => `bulk.find(${this.logObject(cond)}).upsert().update(${this.logObject(doc)});\n`);
                            bulk.find(cond).upsert().update(doc);
                        }
                        return;
                    }
                    query += log(() => `bulk.find(${this.logObject(cond)}).update(${this.logObject(doc)});\n`);
                    bulk.find(cond).update(doc);
                });
                query += log(() => `bulk.execute()`);
                res = await this.rethrow(bulk.execute(), query);
                break;
            }
            case 'deleteMany':
            case 'countDocuments':
                // count supports extra tuning options that delete does not
                if (method === 'countDocuments') {
                    if (collation) {
                        options.collation = collation;
                    }
                    if (indexHint != null) {
                        options.hint = indexHint;
                    }
                    if (maxTimeMS != null) {
                        options.maxTimeMS = maxTimeMS;
                    }
                }
                query = log(() => `db.getCollection('${collection}').${method}(${this.logObject(where)}, ${this.logObject(options)});`);
                res = await this.rethrow(this.getCollection(entityName)[method](where, options), query);
                break;
        }
        this.logQuery(query, { took: Date.now() - now, ...loggerContext });
        // countDocuments resolves to a plain number, not a driver result object
        if (method === 'countDocuments') {
            return res;
        }
        return this.transformResult(res);
    }
rethrow(promise, query) {
return promise.catch(e => {
this.logQuery(query, { level: 'error' });
e.message += '\nQuery: ' + query;
throw e;
if (upsertOptions) {
if (upsertOptions.onConflictAction === 'ignore') {
doc.$setOnInsert = doc.$set;
delete doc.$set;
}
if (upsertOptions.onConflictMergeFields) {
doc.$setOnInsert = {};
upsertOptions.onConflictMergeFields.forEach(f => {
doc.$setOnInsert[f] = doc.$set[f];
delete doc.$set[f];
});
}
createUpdatePayload(row, upsertOptions) {
row = { ...row };
const doc = { $set: row };
Utils.keys(row).forEach(k => {
if (k.toString().startsWith('$')) {
doc[k] = row[k];
delete row[k];
}
const { $set, $setOnInsert } = doc;
doc.$set = $setOnInsert;
doc.$setOnInsert = $set;
} else if (upsertOptions.onConflictExcludeFields) {
doc.$setOnInsert = {};
upsertOptions.onConflictExcludeFields.forEach(f => {
doc.$setOnInsert[f] = doc.$set[f];
delete doc.$set[f];
});
const $unset = {};
const $inc = {};
for (const k of Utils.keys(row)) {
const item = row[k];
if (typeof item === 'undefined') {
$unset[k] = '';
delete row[k];
continue;
}
if (Utils.isPlainObject(item) && '$inc' in item) {
$inc[k] = item.$inc;
delete row[k];
}
}
if (upsertOptions) {
if (upsertOptions.onConflictAction === 'ignore') {
doc.$setOnInsert = doc.$set;
delete doc.$set;
}
if (upsertOptions.onConflictMergeFields) {
doc.$setOnInsert = {};
upsertOptions.onConflictMergeFields.forEach(f => {
doc.$setOnInsert[f] = doc.$set[f];
delete doc.$set[f];
});
const { $set, $setOnInsert } = doc;
doc.$set = $setOnInsert;
doc.$setOnInsert = $set;
}
else if (upsertOptions.onConflictExcludeFields) {
doc.$setOnInsert = {};
upsertOptions.onConflictExcludeFields.forEach(f => {
doc.$setOnInsert[f] = doc.$set[f];
delete doc.$set[f];
});
}
}
if (Utils.hasObjectKeys($unset)) {
doc.$unset = $unset;
}
if (Utils.hasObjectKeys($inc)) {
doc.$inc = $inc;
}
if (!Utils.hasObjectKeys(doc.$set)) {
delete doc.$set;
}
return doc;
}
}
transformResult(res) {
return {
affectedRows: res.modifiedCount || res.deletedCount || res.insertedCount || 0,
insertId: res.insertedId ?? res.insertedIds?.[0],
insertedIds: res.insertedIds ? Object.values(res.insertedIds) : undefined,
};
if (Utils.hasObjectKeys($unset)) {
doc.$unset = $unset;
}
getCollectionName(entityName) {
const meta = this.metadata.find(entityName);
return meta ? meta.collection : Utils.className(entityName);
if (Utils.hasObjectKeys($inc)) {
doc.$inc = $inc;
}
logObject(o) {
if (o?.session) {
o = { ...o, session: `[ClientSession]` };
}
return inspect(o, { depth: 5, compact: true, breakLength: 300 });
if (!Utils.hasObjectKeys(doc.$set)) {
delete doc.$set;
}
return doc;
}
transformResult(res) {
return {
affectedRows: res.modifiedCount || res.deletedCount || res.insertedCount || 0,
insertId: res.insertedId ?? res.insertedIds?.[0],
insertedIds: res.insertedIds ? Object.values(res.insertedIds) : undefined,
};
}
getCollectionName(entityName) {
const meta = this.metadata.find(entityName);
return meta ? meta.collection : Utils.className(entityName);
}
logObject(o) {
if (o?.session) {
o = { ...o, session: `[ClientSession]` };
}
return inspect(o, { depth: 5, compact: true, breakLength: 300 });
}
}
import { type ClientSession } from 'mongodb';
import { type Configuration, type Constructor, type CountOptions, DatabaseDriver, type EntityData, type EntityDictionary, type EntityField, EntityManagerType, type EntityName, type FilterQuery, type FindOneOptions, type FindOptions, type NativeInsertUpdateManyOptions, type NativeInsertUpdateOptions, type PopulateOptions, type QueryResult, type StreamOptions, type Transaction, type UpsertManyOptions, type UpsertOptions } from '@mikro-orm/core';
import {
type Configuration,
type Constructor,
type CountOptions,
DatabaseDriver,
type EntityData,
type EntityDictionary,
type EntityField,
EntityManagerType,
type EntityName,
type FilterQuery,
type FindOneOptions,
type FindOptions,
type NativeInsertUpdateManyOptions,
type NativeInsertUpdateOptions,
type PopulateOptions,
type QueryResult,
type StreamOptions,
type Transaction,
type UpsertManyOptions,
type UpsertOptions,
} from '@mikro-orm/core';
import { MongoConnection } from './MongoConnection.js';

@@ -9,33 +30,84 @@ import { MongoPlatform } from './MongoPlatform.js';

/**
 * MongoDB driver declaration. Bridges MikroORM's generic `DatabaseDriver`
 * contract to the `MongoConnection` wrapper around the official MongoDB client.
 */
export declare class MongoDriver extends DatabaseDriver<MongoConnection> {
  [EntityManagerType]: MongoEntityManager<this>;
  protected readonly connection: MongoConnection;
  protected readonly platform: MongoPlatform;
  constructor(config: Configuration);
  createEntityManager(useContext?: boolean): this[typeof EntityManagerType];
  stream<T extends object>(
    entityName: EntityName<T>,
    where: FilterQuery<T>,
    options: StreamOptions<T, any, any, any> & {
      rawResults?: boolean;
    },
  ): AsyncIterableIterator<T>;
  find<T extends object, P extends string = never, F extends string = never, E extends string = never>(
    entityName: EntityName<T>,
    where: FilterQuery<T>,
    options?: FindOptions<T, P, F, E>,
  ): Promise<EntityData<T>[]>;
  findOne<T extends object, P extends string = never, F extends string = never, E extends string = never>(
    entityName: EntityName<T>,
    where: FilterQuery<T>,
    options?: FindOneOptions<T, P, F, E>,
  ): Promise<EntityData<T> | null>;
  findVirtual<T extends object>(
    entityName: EntityName<T>,
    where: FilterQuery<T>,
    options: FindOptions<T, any, any, any>,
  ): Promise<EntityData<T>[]>;
  streamVirtual<T extends object>(
    entityName: EntityName<T>,
    where: FilterQuery<T>,
    options: FindOptions<T, any, any, any>,
  ): AsyncIterableIterator<EntityData<T>>;
  count<T extends object>(entityName: EntityName<T>, where: FilterQuery<T>, options?: CountOptions<T>): Promise<number>;
  nativeInsert<T extends object>(
    entityName: EntityName<T>,
    data: EntityDictionary<T>,
    options?: NativeInsertUpdateOptions<T>,
  ): Promise<QueryResult<T>>;
  nativeInsertMany<T extends object>(
    entityName: EntityName<T>,
    data: EntityDictionary<T>[],
    options?: NativeInsertUpdateManyOptions<T>,
  ): Promise<QueryResult<T>>;
  nativeUpdate<T extends object>(
    entityName: EntityName<T>,
    where: FilterQuery<T>,
    data: EntityDictionary<T>,
    options?: NativeInsertUpdateOptions<T> & UpsertOptions<T>,
  ): Promise<QueryResult<T>>;
  nativeUpdateMany<T extends object>(
    entityName: EntityName<T>,
    where: FilterQuery<T>[],
    data: EntityDictionary<T>[],
    options?: NativeInsertUpdateOptions<T> & UpsertManyOptions<T>,
  ): Promise<QueryResult<T>>;
  nativeDelete<T extends object>(
    entityName: EntityName<T>,
    where: FilterQuery<T>,
    options?: {
      ctx?: Transaction<ClientSession>;
    },
  ): Promise<QueryResult<T>>;
  aggregate(entityName: EntityName, pipeline: any[], ctx?: Transaction<ClientSession>): Promise<any[]>;
  streamAggregate<T extends object>(
    entityName: EntityName<T>,
    pipeline: any[],
    ctx?: Transaction<ClientSession>,
  ): AsyncIterableIterator<T>;
  getPlatform(): MongoPlatform;
  private buildQueryOptions;
  private renameFields;
  private convertObjectIds;
  private buildFilterById;
  protected buildFields<T extends object, P extends string = never>(
    entityName: EntityName<T>,
    populate: PopulateOptions<T>[],
    fields?: readonly EntityField<T, P>[],
    exclude?: string[],
  ): string[] | undefined;
  private handleVersionProperty;
  /** @inheritDoc */
  getORMClass(): Constructor<MongoMikroORM>;
}
import { ObjectId } from 'mongodb';
import { DatabaseDriver, EntityManagerType, PolymorphicRef, ReferenceKind, Utils, } from '@mikro-orm/core';
import { DatabaseDriver, EntityManagerType, PolymorphicRef, ReferenceKind, Utils } from '@mikro-orm/core';
import { MongoConnection } from './MongoConnection.js';

@@ -9,437 +9,447 @@ import { MongoPlatform } from './MongoPlatform.js';

export class MongoDriver extends DatabaseDriver {
[EntityManagerType];
connection = new MongoConnection(this.config);
platform = new MongoPlatform();
constructor(config) {
super(config, ['mongodb']);
[EntityManagerType];
connection = new MongoConnection(this.config);
platform = new MongoPlatform();
constructor(config) {
super(config, ['mongodb']);
}
createEntityManager(useContext) {
const EntityManagerClass = this.config.get('entityManager', MongoEntityManager);
return new EntityManagerClass(this.config, this, this.metadata, useContext);
}
async *stream(entityName, where, options) {
if (this.metadata.find(entityName)?.virtual) {
yield* this.streamVirtual(entityName, where, options);
return;
}
createEntityManager(useContext) {
const EntityManagerClass = this.config.get('entityManager', MongoEntityManager);
return new EntityManagerClass(this.config, this, this.metadata, useContext);
const fields = this.buildFields(entityName, options.populate || [], options.fields, options.exclude);
where = this.renameFields(entityName, where, true);
const orderBy = Utils.asArray(options.orderBy).map(orderBy => this.renameFields(entityName, orderBy, true));
const res = this.getConnection('read').stream(entityName, where, {
orderBy,
limit: options.limit,
offset: options.offset,
fields,
ctx: options.ctx,
...this.buildQueryOptions(options),
});
for await (const item of res) {
if (options.rawResults) {
yield item;
} else {
yield this.mapResult(item, this.metadata.find(entityName));
}
}
async *stream(entityName, where, options) {
if (this.metadata.find(entityName)?.virtual) {
yield* this.streamVirtual(entityName, where, options);
return;
}
const fields = this.buildFields(entityName, options.populate || [], options.fields, options.exclude);
where = this.renameFields(entityName, where, true);
const orderBy = Utils.asArray(options.orderBy).map(orderBy => this.renameFields(entityName, orderBy, true));
const res = this.getConnection('read').stream(entityName, where, {
orderBy,
limit: options.limit,
offset: options.offset,
fields,
ctx: options.ctx,
...this.buildQueryOptions(options),
});
for await (const item of res) {
if (options.rawResults) {
yield item;
}
else {
yield this.mapResult(item, this.metadata.find(entityName));
}
}
}
async find(entityName, where, options = {}) {
if (this.metadata.find(entityName)?.virtual) {
return this.findVirtual(entityName, where, options);
}
async find(entityName, where, options = {}) {
if (this.metadata.find(entityName)?.virtual) {
return this.findVirtual(entityName, where, options);
const { first, last, before, after } = options;
const fields = this.buildFields(entityName, options.populate || [], options.fields, options.exclude);
where = this.renameFields(entityName, where, true);
const isCursorPagination = [first, last, before, after].some(v => v != null);
if (isCursorPagination) {
const andWhere = (cond1, cond2) => {
if (Utils.isEmpty(cond1)) {
return cond2;
}
const { first, last, before, after } = options;
const fields = this.buildFields(entityName, options.populate || [], options.fields, options.exclude);
where = this.renameFields(entityName, where, true);
const isCursorPagination = [first, last, before, after].some(v => v != null);
if (isCursorPagination) {
const andWhere = (cond1, cond2) => {
if (Utils.isEmpty(cond1)) {
return cond2;
}
if (Utils.isEmpty(cond2)) {
return cond1;
}
return { $and: [cond1, cond2] };
};
const meta = this.metadata.find(entityName);
const { orderBy: newOrderBy, where: newWhere } = this.processCursorOptions(meta, options, options.orderBy);
const newWhereConverted = this.renameFields(entityName, newWhere, true);
const orderBy = Utils.asArray(newOrderBy).map(order => this.renameFields(entityName, order, true));
const res = await this.rethrow(this.getConnection('read').find(entityName, andWhere(where, newWhereConverted), {
orderBy,
limit: options.limit,
offset: options.offset,
fields,
ctx: options.ctx,
loggerContext: options.logging,
...this.buildQueryOptions(options),
}));
if (isCursorPagination && !first && !!last) {
res.reverse();
}
return res.map(r => this.mapResult(r, this.metadata.find(entityName)));
if (Utils.isEmpty(cond2)) {
return cond1;
}
const orderBy = Utils.asArray(options.orderBy).map(orderBy => this.renameFields(entityName, orderBy, true));
const res = await this.rethrow(this.getConnection('read').find(entityName, where, {
orderBy,
limit: options.limit,
offset: options.offset,
fields,
ctx: options.ctx,
...this.buildQueryOptions(options),
}));
return res.map(r => this.mapResult(r, this.metadata.find(entityName)));
return { $and: [cond1, cond2] };
};
const meta = this.metadata.find(entityName);
const { orderBy: newOrderBy, where: newWhere } = this.processCursorOptions(meta, options, options.orderBy);
const newWhereConverted = this.renameFields(entityName, newWhere, true);
const orderBy = Utils.asArray(newOrderBy).map(order => this.renameFields(entityName, order, true));
const res = await this.rethrow(
this.getConnection('read').find(entityName, andWhere(where, newWhereConverted), {
orderBy,
limit: options.limit,
offset: options.offset,
fields,
ctx: options.ctx,
loggerContext: options.logging,
...this.buildQueryOptions(options),
}),
);
if (isCursorPagination && !first && !!last) {
res.reverse();
}
return res.map(r => this.mapResult(r, this.metadata.find(entityName)));
}
async findOne(entityName, where, options = { populate: [], orderBy: {} }) {
if (this.metadata.find(entityName)?.virtual) {
const [item] = await this.findVirtual(entityName, where, options);
/* v8 ignore next */
return item ?? null;
}
if (Utils.isPrimaryKey(where)) {
where = this.buildFilterById(entityName, where);
}
const fields = this.buildFields(entityName, options.populate || [], options.fields, options.exclude);
where = this.renameFields(entityName, where, true);
const orderBy = Utils.asArray(options.orderBy).map(orderBy => this.renameFields(entityName, orderBy, true));
const res = await this.rethrow(this.getConnection('read').find(entityName, where, {
orderBy,
limit: 1,
fields,
ctx: options.ctx,
loggerContext: options.logging,
...this.buildQueryOptions(options),
}));
return this.mapResult(res[0], this.metadata.find(entityName));
const orderBy = Utils.asArray(options.orderBy).map(orderBy => this.renameFields(entityName, orderBy, true));
const res = await this.rethrow(
this.getConnection('read').find(entityName, where, {
orderBy,
limit: options.limit,
offset: options.offset,
fields,
ctx: options.ctx,
...this.buildQueryOptions(options),
}),
);
return res.map(r => this.mapResult(r, this.metadata.find(entityName)));
}
async findOne(entityName, where, options = { populate: [], orderBy: {} }) {
if (this.metadata.find(entityName)?.virtual) {
const [item] = await this.findVirtual(entityName, where, options);
/* v8 ignore next */
return item ?? null;
}
async findVirtual(entityName, where, options) {
const meta = this.metadata.find(entityName);
if (meta.expression instanceof Function) {
const em = this.createEntityManager();
return meta.expression(em, where, options);
}
/* v8 ignore next */
return super.findVirtual(entityName, where, options);
if (Utils.isPrimaryKey(where)) {
where = this.buildFilterById(entityName, where);
}
async *streamVirtual(entityName, where, options) {
const meta = this.metadata.find(entityName);
if (meta.expression instanceof Function) {
const em = this.createEntityManager();
const stream = await meta.expression(em, where, options, true);
yield* stream;
return;
}
/* v8 ignore next */
return super.findVirtual(entityName, where, options);
const fields = this.buildFields(entityName, options.populate || [], options.fields, options.exclude);
where = this.renameFields(entityName, where, true);
const orderBy = Utils.asArray(options.orderBy).map(orderBy => this.renameFields(entityName, orderBy, true));
const res = await this.rethrow(
this.getConnection('read').find(entityName, where, {
orderBy,
limit: 1,
fields,
ctx: options.ctx,
loggerContext: options.logging,
...this.buildQueryOptions(options),
}),
);
return this.mapResult(res[0], this.metadata.find(entityName));
}
async findVirtual(entityName, where, options) {
const meta = this.metadata.find(entityName);
if (meta.expression instanceof Function) {
const em = this.createEntityManager();
return meta.expression(em, where, options);
}
async count(entityName, where, options = {}) {
/* v8 ignore next */
if (this.metadata.find(entityName)?.virtual) {
return this.countVirtual(entityName, where, options);
}
where = this.renameFields(entityName, where, true);
const queryOpts = this.buildQueryOptions(options);
return this.rethrow(this.getConnection('read').countDocuments(entityName, where, {
ctx: options.ctx,
loggerContext: options.logging,
...queryOpts,
}));
/* v8 ignore next */
return super.findVirtual(entityName, where, options);
}
async *streamVirtual(entityName, where, options) {
const meta = this.metadata.find(entityName);
if (meta.expression instanceof Function) {
const em = this.createEntityManager();
const stream = await meta.expression(em, where, options, true);
yield* stream;
return;
}
async nativeInsert(entityName, data, options = {}) {
this.handleVersionProperty(entityName, data);
data = this.renameFields(entityName, data);
return this.rethrow(this.getConnection('write').insertOne(entityName, data, options.ctx));
/* v8 ignore next */
return super.findVirtual(entityName, where, options);
}
async count(entityName, where, options = {}) {
/* v8 ignore next */
if (this.metadata.find(entityName)?.virtual) {
return this.countVirtual(entityName, where, options);
}
async nativeInsertMany(entityName, data, options = {}) {
data = data.map(item => {
this.handleVersionProperty(entityName, item);
return this.renameFields(entityName, item);
});
const meta = this.metadata.find(entityName);
/* v8 ignore next */
const pk = meta?.getPrimaryProps()[0].fieldNames[0] ?? '_id';
const res = await this.rethrow(this.getConnection('write').insertMany(entityName, data, options.ctx));
res.rows = res.insertedIds.map(id => ({ [pk]: id }));
return res;
where = this.renameFields(entityName, where, true);
const queryOpts = this.buildQueryOptions(options);
return this.rethrow(
this.getConnection('read').countDocuments(entityName, where, {
ctx: options.ctx,
loggerContext: options.logging,
...queryOpts,
}),
);
}
async nativeInsert(entityName, data, options = {}) {
this.handleVersionProperty(entityName, data);
data = this.renameFields(entityName, data);
return this.rethrow(this.getConnection('write').insertOne(entityName, data, options.ctx));
}
async nativeInsertMany(entityName, data, options = {}) {
data = data.map(item => {
this.handleVersionProperty(entityName, item);
return this.renameFields(entityName, item);
});
const meta = this.metadata.find(entityName);
/* v8 ignore next */
const pk = meta?.getPrimaryProps()[0].fieldNames[0] ?? '_id';
const res = await this.rethrow(this.getConnection('write').insertMany(entityName, data, options.ctx));
res.rows = res.insertedIds.map(id => ({ [pk]: id }));
return res;
}
async nativeUpdate(entityName, where, data, options = {}) {
if (Utils.isPrimaryKey(where)) {
where = this.buildFilterById(entityName, where);
}
async nativeUpdate(entityName, where, data, options = {}) {
if (Utils.isPrimaryKey(where)) {
where = this.buildFilterById(entityName, where);
}
this.handleVersionProperty(entityName, data, true);
data = this.renameFields(entityName, data);
where = this.renameFields(entityName, where, true);
options = { ...options };
const meta = this.metadata.find(entityName);
/* v8 ignore next */
const rename = (field) => meta ? (meta.properties[field]?.fieldNames[0] ?? field) : field;
if (options.onConflictFields && Array.isArray(options.onConflictFields)) {
options.onConflictFields = options.onConflictFields.map(rename);
}
if (options.onConflictMergeFields) {
options.onConflictMergeFields = options.onConflictMergeFields.map(rename);
}
if (options.onConflictExcludeFields) {
options.onConflictExcludeFields = options.onConflictExcludeFields.map(rename);
}
return this.rethrow(this.getConnection('write').updateMany(entityName, where, data, options.ctx, options.upsert, options));
this.handleVersionProperty(entityName, data, true);
data = this.renameFields(entityName, data);
where = this.renameFields(entityName, where, true);
options = { ...options };
const meta = this.metadata.find(entityName);
/* v8 ignore next */
const rename = field => (meta ? (meta.properties[field]?.fieldNames[0] ?? field) : field);
if (options.onConflictFields && Array.isArray(options.onConflictFields)) {
options.onConflictFields = options.onConflictFields.map(rename);
}
async nativeUpdateMany(entityName, where, data, options = {}) {
where = where.map(row => {
if (Utils.isPlainObject(row)) {
return this.renameFields(entityName, row, true);
}
return row;
});
data = data.map(row => {
this.handleVersionProperty(entityName, row, true);
return this.renameFields(entityName, row);
});
options = { ...options };
const meta = this.metadata.find(entityName);
/* v8 ignore next */
const rename = (field) => meta ? (meta.properties[field]?.fieldNames[0] ?? field) : field;
if (options.onConflictFields && Array.isArray(options.onConflictFields)) {
options.onConflictFields = options.onConflictFields.map(rename);
}
if (options.onConflictMergeFields) {
options.onConflictMergeFields = options.onConflictMergeFields.map(rename);
}
if (options.onConflictExcludeFields) {
options.onConflictExcludeFields = options.onConflictExcludeFields.map(rename);
}
/* v8 ignore next */
const pk = meta?.getPrimaryProps()[0].fieldNames[0] ?? '_id';
const res = await this.rethrow(this.getConnection('write').bulkUpdateMany(entityName, where, data, options.ctx, options.upsert, options));
if (res.insertedIds) {
let i = 0;
res.rows = where.map(cond => {
if (Utils.isEmpty(cond)) {
return { [pk]: res.insertedIds[i++] };
}
return { [pk]: cond[pk] };
});
}
return res;
if (options.onConflictMergeFields) {
options.onConflictMergeFields = options.onConflictMergeFields.map(rename);
}
async nativeDelete(entityName, where, options = {}) {
if (Utils.isPrimaryKey(where)) {
where = this.buildFilterById(entityName, where);
}
where = this.renameFields(entityName, where, true);
return this.rethrow(this.getConnection('write').deleteMany(entityName, where, options.ctx));
if (options.onConflictExcludeFields) {
options.onConflictExcludeFields = options.onConflictExcludeFields.map(rename);
}
async aggregate(entityName, pipeline, ctx) {
return this.rethrow(this.getConnection('read').aggregate(entityName, pipeline, ctx));
return this.rethrow(
this.getConnection('write').updateMany(entityName, where, data, options.ctx, options.upsert, options),
);
}
async nativeUpdateMany(entityName, where, data, options = {}) {
where = where.map(row => {
if (Utils.isPlainObject(row)) {
return this.renameFields(entityName, row, true);
}
return row;
});
data = data.map(row => {
this.handleVersionProperty(entityName, row, true);
return this.renameFields(entityName, row);
});
options = { ...options };
const meta = this.metadata.find(entityName);
/* v8 ignore next */
const rename = field => (meta ? (meta.properties[field]?.fieldNames[0] ?? field) : field);
if (options.onConflictFields && Array.isArray(options.onConflictFields)) {
options.onConflictFields = options.onConflictFields.map(rename);
}
async *streamAggregate(entityName, pipeline, ctx) {
yield* this.getConnection('read').streamAggregate(entityName, pipeline, ctx);
if (options.onConflictMergeFields) {
options.onConflictMergeFields = options.onConflictMergeFields.map(rename);
}
getPlatform() {
return this.platform;
if (options.onConflictExcludeFields) {
options.onConflictExcludeFields = options.onConflictExcludeFields.map(rename);
}
buildQueryOptions(options) {
if (options.collation != null && typeof options.collation === 'string') {
throw new Error("Collation option for MongoDB must be a CollationOptions object (e.g. { locale: 'en' }). Use a string only with SQL drivers.");
/* v8 ignore next */
const pk = meta?.getPrimaryProps()[0].fieldNames[0] ?? '_id';
const res = await this.rethrow(
this.getConnection('write').bulkUpdateMany(entityName, where, data, options.ctx, options.upsert, options),
);
if (res.insertedIds) {
let i = 0;
res.rows = where.map(cond => {
if (Utils.isEmpty(cond)) {
return { [pk]: res.insertedIds[i++] };
}
const ret = {};
if (options.collation) {
ret.collation = options.collation;
}
if (options.indexHint != null) {
ret.indexHint = options.indexHint;
}
if (options.maxTimeMS != null) {
ret.maxTimeMS = options.maxTimeMS;
}
if (options.allowDiskUse != null) {
ret.allowDiskUse = options.allowDiskUse;
}
return ret;
return { [pk]: cond[pk] };
});
}
renameFields(entityName, data, dotPaths = false, object, root = true) {
if (data == null && root) {
return {};
return res;
}
async nativeDelete(entityName, where, options = {}) {
if (Utils.isPrimaryKey(where)) {
where = this.buildFilterById(entityName, where);
}
where = this.renameFields(entityName, where, true);
return this.rethrow(this.getConnection('write').deleteMany(entityName, where, options.ctx));
}
async aggregate(entityName, pipeline, ctx) {
return this.rethrow(this.getConnection('read').aggregate(entityName, pipeline, ctx));
}
async *streamAggregate(entityName, pipeline, ctx) {
yield* this.getConnection('read').streamAggregate(entityName, pipeline, ctx);
}
getPlatform() {
return this.platform;
}
buildQueryOptions(options) {
if (options.collation != null && typeof options.collation === 'string') {
throw new Error(
"Collation option for MongoDB must be a CollationOptions object (e.g. { locale: 'en' }). Use a string only with SQL drivers.",
);
}
const ret = {};
if (options.collation) {
ret.collation = options.collation;
}
if (options.indexHint != null) {
ret.indexHint = options.indexHint;
}
if (options.maxTimeMS != null) {
ret.maxTimeMS = options.maxTimeMS;
}
if (options.allowDiskUse != null) {
ret.allowDiskUse = options.allowDiskUse;
}
return ret;
}
renameFields(entityName, data, dotPaths = false, object, root = true) {
if (data == null && root) {
return {};
}
if (typeof data !== 'object' || data === null) {
return data;
}
// copy to new variable to prevent changing the T type or doing as unknown casts
const copiedData = Object.assign({}, data); // copy first
const meta = this.metadata.find(entityName);
if (meta?.serializedPrimaryKey && !meta.embeddable && meta.serializedPrimaryKey !== meta.primaryKeys[0]) {
Utils.renameKey(copiedData, meta.serializedPrimaryKey, meta.primaryKeys[0]);
}
if (meta && !meta.embeddable) {
this.inlineEmbeddables(meta, copiedData, dotPaths);
}
// If we had a query with $fulltext and some filter we end up with $and with $fulltext in it.
// We will try to move $fulltext to top level.
if (copiedData.$and) {
for (let i = 0; i < copiedData.$and.length; i++) {
const and = copiedData.$and[i];
if ('$fulltext' in and) {
/* v8 ignore next */
if ('$fulltext' in copiedData) {
throw new Error('Cannot merge multiple $fulltext conditions to top level of the query object.');
}
copiedData.$fulltext = and.$fulltext;
delete and.$fulltext;
}
if (typeof data !== 'object' || data === null) {
return data;
}
// copy to new variable to prevent changing the T type or doing as unknown casts
const copiedData = Object.assign({}, data); // copy first
const meta = this.metadata.find(entityName);
if (meta?.serializedPrimaryKey && !meta.embeddable && meta.serializedPrimaryKey !== meta.primaryKeys[0]) {
Utils.renameKey(copiedData, meta.serializedPrimaryKey, meta.primaryKeys[0]);
}
if (meta && !meta.embeddable) {
this.inlineEmbeddables(meta, copiedData, dotPaths);
}
// If we had a query with $fulltext and some filter we end up with $and with $fulltext in it.
// We will try to move $fulltext to top level.
if (copiedData.$and) {
for (let i = 0; i < copiedData.$and.length; i++) {
const and = copiedData.$and[i];
if ('$fulltext' in and) {
/* v8 ignore next */
if ('$fulltext' in copiedData) {
throw new Error('Cannot merge multiple $fulltext conditions to top level of the query object.');
}
copiedData.$fulltext = and.$fulltext;
delete and.$fulltext;
}
}
}
// move search terms from data['$fulltext'] to mongo's structure: data['$text']['search']
if ('$fulltext' in copiedData) {
copiedData.$text = { $search: copiedData.$fulltext };
delete copiedData.$fulltext;
}
// mongo only allows the $text operator in the root of the object and will
// search all documents where the field has a text index.
if (Utils.hasNestedKey(copiedData, '$fulltext')) {
throw new Error('Full text search is only supported on the top level of the query object.');
}
Utils.keys(copiedData).forEach(k => {
if (Utils.isOperator(k)) {
if (Array.isArray(copiedData[k])) {
copiedData[k] = copiedData[k].map(v => this.renameFields(entityName, v, dotPaths, object, false));
}
else {
copiedData[k] = this.renameFields(entityName, copiedData[k], dotPaths, object, false);
}
return;
}
if (meta?.properties[k]) {
const prop = meta.properties[k];
let isObjectId = false;
if (prop.kind === ReferenceKind.SCALAR) {
isObjectId = prop.type === 'ObjectId';
}
else if (prop.kind === ReferenceKind.EMBEDDED) {
if (copiedData[prop.name] == null) {
return;
}
if (prop.array && Array.isArray(copiedData[prop.name])) {
copiedData[prop.name] = copiedData[prop.name].map((item) => this.renameFields(prop.targetMeta.class, item, dotPaths, true, false));
}
else {
copiedData[prop.name] = this.renameFields(prop.targetMeta.class, copiedData[prop.name], dotPaths, prop.object || object, false);
}
}
else if (prop.polymorphic && prop.fieldNames?.length >= 2) {
// Polymorphic M:1: split into discriminator + FK fields
const value = copiedData[k];
delete copiedData[k];
if (value instanceof PolymorphicRef) {
copiedData[prop.fieldNames[0]] = value.discriminator;
const idField = prop.fieldNames[1];
const targetMeta = this.metadata.find(prop.discriminatorMap[value.discriminator]);
const hasObjectId = targetMeta && targetMeta.properties[targetMeta.primaryKeys[0]]?.type === 'ObjectId';
copiedData[idField] = hasObjectId ? this.convertObjectIds(value.id) : value.id;
}
else if (Array.isArray(value)) {
// Tuple format: [discriminator, id]
copiedData[prop.fieldNames[0]] = value[0];
copiedData[prop.fieldNames[1]] = value[1] != null ? this.convertObjectIds(value[1]) : value[1];
}
else if (value == null) {
prop.fieldNames.forEach(f => (copiedData[f] = null));
}
return;
}
else {
const meta2 = this.metadata.find(prop.targetMeta.class);
const pk = meta2.properties[meta2.primaryKeys[0]];
isObjectId = pk.type === 'ObjectId';
}
if (isObjectId) {
copiedData[k] = this.convertObjectIds(copiedData[k]);
}
if (prop.fieldNames) {
Utils.renameKey(copiedData, k, prop.fieldNames[0]);
}
}
if (Utils.isPlainObject(copiedData[k]) && '$re' in copiedData[k]) {
copiedData[k] = new RegExp(copiedData[k].$re);
}
});
return copiedData;
}
}
convertObjectIds(data) {
if (data instanceof ObjectId) {
return data;
// move search terms from data['$fulltext'] to mongo's structure: data['$text']['search']
if ('$fulltext' in copiedData) {
copiedData.$text = { $search: copiedData.$fulltext };
delete copiedData.$fulltext;
}
// mongo only allows the $text operator in the root of the object and will
// search all documents where the field has a text index.
if (Utils.hasNestedKey(copiedData, '$fulltext')) {
throw new Error('Full text search is only supported on the top level of the query object.');
}
Utils.keys(copiedData).forEach(k => {
if (Utils.isOperator(k)) {
if (Array.isArray(copiedData[k])) {
copiedData[k] = copiedData[k].map(v => this.renameFields(entityName, v, dotPaths, object, false));
} else {
copiedData[k] = this.renameFields(entityName, copiedData[k], dotPaths, object, false);
}
if (typeof data === 'string' && /^[0-9a-f]{24}$/i.exec(data)) {
return new ObjectId(data);
return;
}
if (meta?.properties[k]) {
const prop = meta.properties[k];
let isObjectId = false;
if (prop.kind === ReferenceKind.SCALAR) {
isObjectId = prop.type === 'ObjectId';
} else if (prop.kind === ReferenceKind.EMBEDDED) {
if (copiedData[prop.name] == null) {
return;
}
if (prop.array && Array.isArray(copiedData[prop.name])) {
copiedData[prop.name] = copiedData[prop.name].map(item =>
this.renameFields(prop.targetMeta.class, item, dotPaths, true, false),
);
} else {
copiedData[prop.name] = this.renameFields(
prop.targetMeta.class,
copiedData[prop.name],
dotPaths,
prop.object || object,
false,
);
}
} else if (prop.polymorphic && prop.fieldNames?.length >= 2) {
// Polymorphic M:1: split into discriminator + FK fields
const value = copiedData[k];
delete copiedData[k];
if (value instanceof PolymorphicRef) {
copiedData[prop.fieldNames[0]] = value.discriminator;
const idField = prop.fieldNames[1];
const targetMeta = this.metadata.find(prop.discriminatorMap[value.discriminator]);
const hasObjectId = targetMeta && targetMeta.properties[targetMeta.primaryKeys[0]]?.type === 'ObjectId';
copiedData[idField] = hasObjectId ? this.convertObjectIds(value.id) : value.id;
} else if (Array.isArray(value)) {
// Tuple format: [discriminator, id]
copiedData[prop.fieldNames[0]] = value[0];
copiedData[prop.fieldNames[1]] = value[1] != null ? this.convertObjectIds(value[1]) : value[1];
} else if (value == null) {
prop.fieldNames.forEach(f => (copiedData[f] = null));
}
return;
} else {
const meta2 = this.metadata.find(prop.targetMeta.class);
const pk = meta2.properties[meta2.primaryKeys[0]];
isObjectId = pk.type === 'ObjectId';
}
if (Array.isArray(data)) {
return data.map((item) => this.convertObjectIds(item));
if (isObjectId) {
copiedData[k] = this.convertObjectIds(copiedData[k]);
}
if (Utils.isObject(data)) {
Object.keys(data).forEach(k => {
data[k] = this.convertObjectIds(data[k]);
});
if (prop.fieldNames) {
Utils.renameKey(copiedData, k, prop.fieldNames[0]);
}
return data;
}
if (Utils.isPlainObject(copiedData[k]) && '$re' in copiedData[k]) {
copiedData[k] = new RegExp(copiedData[k].$re);
}
});
return copiedData;
}
convertObjectIds(data) {
if (data instanceof ObjectId) {
return data;
}
buildFilterById(entityName, id) {
const meta = this.metadata.find(entityName);
if (meta.properties[meta.primaryKeys[0]].type === 'ObjectId') {
return { _id: new ObjectId(id) };
}
return { _id: id };
if (typeof data === 'string' && /^[0-9a-f]{24}$/i.exec(data)) {
return new ObjectId(data);
}
buildFields(entityName, populate, fields, exclude) {
const meta = this.metadata.get(entityName);
const lazyProps = meta.props.filter(prop => prop.lazy && !populate.some(p => this.isPopulated(meta, prop, p)));
const ret = [];
if (fields) {
for (let field of fields) {
/* v8 ignore next */
if (Utils.isPlainObject(field)) {
continue;
}
if (field.toString().includes('.')) {
field = field.toString().substring(0, field.toString().indexOf('.'));
}
let prop = meta.properties[field];
/* v8 ignore next */
if (prop) {
if (!prop.fieldNames) {
continue;
}
prop = prop.serializedPrimaryKey ? meta.getPrimaryProps()[0] : prop;
ret.push(prop.fieldNames[0]);
}
else if (field === '*') {
const props = meta.props.filter(prop => this.platform.shouldHaveColumn(prop, populate));
ret.push(...Utils.flatten(props.filter(p => !lazyProps.includes(p)).map(p => p.fieldNames)));
}
else {
ret.push(field);
}
}
ret.unshift(...meta.primaryKeys.filter(pk => !fields.includes(pk)));
}
else if (!Utils.isEmpty(exclude) || lazyProps.some(p => !p.formula)) {
const props = meta.props.filter(prop => this.platform.shouldHaveColumn(prop, populate, exclude));
ret.push(...Utils.flatten(props.filter(p => !lazyProps.includes(p)).map(p => p.fieldNames)));
}
return ret.length > 0 ? ret : undefined;
if (Array.isArray(data)) {
return data.map(item => this.convertObjectIds(item));
}
handleVersionProperty(entityName, data, update = false) {
const meta = this.metadata.find(entityName);
if (!meta?.versionProperty) {
return;
if (Utils.isObject(data)) {
Object.keys(data).forEach(k => {
data[k] = this.convertObjectIds(data[k]);
});
}
return data;
}
buildFilterById(entityName, id) {
const meta = this.metadata.find(entityName);
if (meta.properties[meta.primaryKeys[0]].type === 'ObjectId') {
return { _id: new ObjectId(id) };
}
return { _id: id };
}
buildFields(entityName, populate, fields, exclude) {
const meta = this.metadata.get(entityName);
const lazyProps = meta.props.filter(prop => prop.lazy && !populate.some(p => this.isPopulated(meta, prop, p)));
const ret = [];
if (fields) {
for (let field of fields) {
/* v8 ignore next */
if (Utils.isPlainObject(field)) {
continue;
}
const versionProperty = meta.properties[meta.versionProperty];
if (versionProperty.runtimeType === 'Date') {
data[versionProperty.name] ??= new Date();
if (field.toString().includes('.')) {
field = field.toString().substring(0, field.toString().indexOf('.'));
}
else {
data[versionProperty.name] ??= update ? { $inc: 1 } : 1;
let prop = meta.properties[field];
/* v8 ignore next */
if (prop) {
if (!prop.fieldNames) {
continue;
}
prop = prop.serializedPrimaryKey ? meta.getPrimaryProps()[0] : prop;
ret.push(prop.fieldNames[0]);
} else if (field === '*') {
const props = meta.props.filter(prop => this.platform.shouldHaveColumn(prop, populate));
ret.push(...Utils.flatten(props.filter(p => !lazyProps.includes(p)).map(p => p.fieldNames)));
} else {
ret.push(field);
}
}
ret.unshift(...meta.primaryKeys.filter(pk => !fields.includes(pk)));
} else if (!Utils.isEmpty(exclude) || lazyProps.some(p => !p.formula)) {
const props = meta.props.filter(prop => this.platform.shouldHaveColumn(prop, populate, exclude));
ret.push(...Utils.flatten(props.filter(p => !lazyProps.includes(p)).map(p => p.fieldNames)));
}
/** @inheritDoc */
getORMClass() {
return MongoMikroORM;
return ret.length > 0 ? ret : undefined;
}
handleVersionProperty(entityName, data, update = false) {
const meta = this.metadata.find(entityName);
if (!meta?.versionProperty) {
return;
}
const versionProperty = meta.properties[meta.versionProperty];
if (versionProperty.runtimeType === 'Date') {
data[versionProperty.name] ??= new Date();
} else {
data[versionProperty.name] ??= update ? { $inc: 1 } : 1;
}
}
/** @inheritDoc */
getORMClass() {
return MongoMikroORM;
}
}

@@ -1,2 +0,10 @@

import { EntityManager, type EntityName, type EntityRepository, type GetRepository, type TransactionOptions, type StreamOptions, type Loaded } from '@mikro-orm/core';
import {
EntityManager,
type EntityName,
type EntityRepository,
type GetRepository,
type TransactionOptions,
type StreamOptions,
type Loaded,
} from '@mikro-orm/core';
import type { Collection, Document, TransactionOptions as MongoTransactionOptions } from 'mongodb';

@@ -9,27 +17,37 @@ import type { MongoDriver } from './MongoDriver.js';

export declare class MongoEntityManager<Driver extends MongoDriver = MongoDriver> extends EntityManager<Driver> {
/**
* Shortcut to driver's aggregate method. Available in MongoDriver only.
*/
aggregate(entityName: EntityName, pipeline: any[]): Promise<any[]>;
/**
* Shortcut to driver's aggregate method. Returns a stream. Available in MongoDriver only.
*/
streamAggregate<T extends object>(entityName: EntityName, pipeline: any[]): AsyncIterableIterator<T>;
/**
* @inheritDoc
*/
stream<Entity extends object, Hint extends string = never, Fields extends string = never, Excludes extends string = never>(entityName: EntityName<Entity>, options?: StreamOptions<NoInfer<Entity>, Hint, Fields, Excludes>): AsyncIterableIterator<Loaded<Entity, Hint, Fields, Excludes>>;
getCollection<T extends Document>(entityOrCollectionName: EntityName<T> | string): Collection<T>;
/**
* @inheritDoc
*/
getRepository<T extends object, U extends EntityRepository<T> = MongoEntityRepository<T>>(entityName: EntityName<T>): GetRepository<T, U>;
/**
* @inheritDoc
*/
begin(options?: Omit<TransactionOptions, 'ignoreNestedTransactions'> & MongoTransactionOptions): Promise<void>;
/**
* @inheritDoc
*/
transactional<T>(cb: (em: this) => Promise<T>, options?: TransactionOptions & MongoTransactionOptions): Promise<T>;
/**
* Shortcut to driver's aggregate method. Available in MongoDriver only.
*/
aggregate(entityName: EntityName, pipeline: any[]): Promise<any[]>;
/**
* Shortcut to driver's aggregate method. Returns a stream. Available in MongoDriver only.
*/
streamAggregate<T extends object>(entityName: EntityName, pipeline: any[]): AsyncIterableIterator<T>;
/**
* @inheritDoc
*/
stream<
Entity extends object,
Hint extends string = never,
Fields extends string = never,
Excludes extends string = never,
>(
entityName: EntityName<Entity>,
options?: StreamOptions<NoInfer<Entity>, Hint, Fields, Excludes>,
): AsyncIterableIterator<Loaded<Entity, Hint, Fields, Excludes>>;
getCollection<T extends Document>(entityOrCollectionName: EntityName<T> | string): Collection<T>;
/**
* @inheritDoc
*/
getRepository<T extends object, U extends EntityRepository<T> = MongoEntityRepository<T>>(
entityName: EntityName<T>,
): GetRepository<T, U>;
/**
* @inheritDoc
*/
begin(options?: Omit<TransactionOptions, 'ignoreNestedTransactions'> & MongoTransactionOptions): Promise<void>;
/**
* @inheritDoc
*/
transactional<T>(cb: (em: this) => Promise<T>, options?: TransactionOptions & MongoTransactionOptions): Promise<T>;
}

@@ -1,2 +0,2 @@

import { EntityManager, Utils, } from '@mikro-orm/core';
import { EntityManager, Utils } from '@mikro-orm/core';
/**

@@ -6,44 +6,44 @@ * @inheritDoc

export class MongoEntityManager extends EntityManager {
/**
* Shortcut to driver's aggregate method. Available in MongoDriver only.
*/
async aggregate(entityName, pipeline) {
return this.getDriver().aggregate(entityName, pipeline, this.getTransactionContext());
/**
* Shortcut to driver's aggregate method. Available in MongoDriver only.
*/
async aggregate(entityName, pipeline) {
return this.getDriver().aggregate(entityName, pipeline, this.getTransactionContext());
}
/**
* Shortcut to driver's aggregate method. Returns a stream. Available in MongoDriver only.
*/
async *streamAggregate(entityName, pipeline) {
yield* this.getDriver().streamAggregate(entityName, pipeline, this.getTransactionContext());
}
/**
* @inheritDoc
*/
async *stream(entityName, options = {}) {
if (!Utils.isEmpty(options.populate)) {
throw new Error('Populate option is not supported when streaming results in MongoDB');
}
/**
* Shortcut to driver's aggregate method. Returns a stream. Available in MongoDriver only.
*/
async *streamAggregate(entityName, pipeline) {
yield* this.getDriver().streamAggregate(entityName, pipeline, this.getTransactionContext());
}
/**
* @inheritDoc
*/
async *stream(entityName, options = {}) {
if (!Utils.isEmpty(options.populate)) {
throw new Error('Populate option is not supported when streaming results in MongoDB');
}
yield* super.stream(entityName, options);
}
getCollection(entityOrCollectionName) {
return this.getConnection().getCollection(entityOrCollectionName);
}
/**
* @inheritDoc
*/
getRepository(entityName) {
return super.getRepository(entityName);
}
/**
* @inheritDoc
*/
async begin(options = {}) {
return super.begin(options);
}
/**
* @inheritDoc
*/
async transactional(cb, options = {}) {
return super.transactional(cb, options);
}
yield* super.stream(entityName, options);
}
getCollection(entityOrCollectionName) {
return this.getConnection().getCollection(entityOrCollectionName);
}
/**
* @inheritDoc
*/
getRepository(entityName) {
return super.getRepository(entityName);
}
/**
* @inheritDoc
*/
async begin(options = {}) {
return super.begin(options);
}
/**
* @inheritDoc
*/
async transactional(cb, options = {}) {
return super.transactional(cb, options);
}
}

@@ -6,13 +6,13 @@ import { EntityRepository, type EntityName } from '@mikro-orm/core';

export declare class MongoEntityRepository<T extends object> extends EntityRepository<T> {
protected readonly em: MongoEntityManager;
constructor(em: MongoEntityManager, entityName: EntityName<T>);
/**
* Shortcut to driver's aggregate method. Available in MongoDriver only.
*/
aggregate(pipeline: any[]): Promise<any[]>;
getCollection(): Collection<T>;
/**
* @inheritDoc
*/
getEntityManager(): MongoEntityManager;
protected readonly em: MongoEntityManager;
constructor(em: MongoEntityManager, entityName: EntityName<T>);
/**
* Shortcut to driver's aggregate method. Available in MongoDriver only.
*/
aggregate(pipeline: any[]): Promise<any[]>;
getCollection(): Collection<T>;
/**
* @inheritDoc
*/
getEntityManager(): MongoEntityManager;
}
import { EntityRepository } from '@mikro-orm/core';
/** Entity repository with MongoDB-specific methods such as `aggregate()`. */
export class MongoEntityRepository extends EntityRepository {
em;
constructor(em, entityName) {
super(em, entityName);
this.em = em;
}
/**
* Shortcut to driver's aggregate method. Available in MongoDriver only.
*/
async aggregate(pipeline) {
return this.getEntityManager().aggregate(this.entityName, pipeline);
}
getCollection() {
return this.getEntityManager().getCollection(this.entityName);
}
/**
* @inheritDoc
*/
getEntityManager() {
return this.em;
}
em;
constructor(em, entityName) {
super(em, entityName);
this.em = em;
}
/**
* Shortcut to driver's aggregate method. Available in MongoDriver only.
*/
async aggregate(pipeline) {
return this.getEntityManager().aggregate(this.entityName, pipeline);
}
getCollection() {
return this.getEntityManager().getCollection(this.entityName);
}
/**
* @inheritDoc
*/
getEntityManager() {
return this.em;
}
}
import { ExceptionConverter, type Dictionary, type DriverException } from '@mikro-orm/core';
/** Converts MongoDB native errors into typed MikroORM driver exceptions. */
export declare class MongoExceptionConverter extends ExceptionConverter {
/**
* @see https://gist.github.com/rluvaton/a97a8da46ab6541a3e5702e83b9d357b
*/
convertException(exception: Error & Dictionary): DriverException;
/**
* @see https://gist.github.com/rluvaton/a97a8da46ab6541a3e5702e83b9d357b
*/
convertException(exception: Error & Dictionary): DriverException;
}

@@ -1,17 +0,17 @@

import { UniqueConstraintViolationException, ExceptionConverter, TableExistsException, } from '@mikro-orm/core';
import { UniqueConstraintViolationException, ExceptionConverter, TableExistsException } from '@mikro-orm/core';
/** Converts MongoDB native errors into typed MikroORM driver exceptions. */
export class MongoExceptionConverter extends ExceptionConverter {
/**
* @see https://gist.github.com/rluvaton/a97a8da46ab6541a3e5702e83b9d357b
*/
/* v8 ignore next */
convertException(exception) {
switch (exception.code) {
case 48:
return new TableExistsException(exception);
case 11000:
return new UniqueConstraintViolationException(exception);
}
return super.convertException(exception);
/**
* @see https://gist.github.com/rluvaton/a97a8da46ab6541a3e5702e83b9d357b
*/
/* v8 ignore next */
convertException(exception) {
switch (exception.code) {
case 48:
return new TableExistsException(exception);
case 11000:
return new UniqueConstraintViolationException(exception);
}
return super.convertException(exception);
}
}

@@ -1,24 +0,63 @@

import { type AnyEntity, type EntityClass, type EntitySchema, MikroORM, type Options, type IDatabaseDriver, type EntityManager, type EntityManagerType, type IMigrator } from '@mikro-orm/core';
import {
type AnyEntity,
type EntityClass,
type EntitySchema,
MikroORM,
type Options,
type IDatabaseDriver,
type EntityManager,
type EntityManagerType,
type IMigrator,
} from '@mikro-orm/core';
import { MongoDriver } from './MongoDriver.js';
import type { MongoEntityManager } from './MongoEntityManager.js';
/** Configuration options for the MongoDB driver. */
export type MongoOptions<EM extends MongoEntityManager = MongoEntityManager, Entities extends (string | EntityClass<AnyEntity> | EntitySchema)[] = (string | EntityClass<AnyEntity> | EntitySchema)[]> = Partial<Options<MongoDriver, EM, Entities>>;
export type MongoOptions<
EM extends MongoEntityManager = MongoEntityManager,
Entities extends (string | EntityClass<AnyEntity> | EntitySchema)[] = (
| string
| EntityClass<AnyEntity>
| EntitySchema
)[],
> = Partial<Options<MongoDriver, EM, Entities>>;
/** Creates a type-safe configuration object for the MongoDB driver. */
export declare function defineMongoConfig<EM extends MongoEntityManager = MongoEntityManager, Entities extends (string | EntityClass<AnyEntity> | EntitySchema)[] = (string | EntityClass<AnyEntity> | EntitySchema)[]>(options: MongoOptions<EM, Entities>): MongoOptions<EM, Entities>;
export declare function defineMongoConfig<
EM extends MongoEntityManager = MongoEntityManager,
Entities extends (string | EntityClass<AnyEntity> | EntitySchema)[] = (
| string
| EntityClass<AnyEntity>
| EntitySchema
)[],
>(options: MongoOptions<EM, Entities>): MongoOptions<EM, Entities>;
/**
* @inheritDoc
*/
export declare class MongoMikroORM<EM extends MongoEntityManager = MongoEntityManager, Entities extends (string | EntityClass<AnyEntity> | EntitySchema)[] = (string | EntityClass<AnyEntity> | EntitySchema)[]> extends MikroORM<MongoDriver, EM, Entities> {
/**
* @inheritDoc
*/
static init<D extends IDatabaseDriver = MongoDriver, EM extends EntityManager<D> = D[typeof EntityManagerType] & EntityManager<D>, Entities extends (string | EntityClass<AnyEntity> | EntitySchema)[] = (string | EntityClass<AnyEntity> | EntitySchema)[]>(options: Partial<Options<D, EM, Entities>>): Promise<MikroORM<D, EM, Entities>>;
/**
* @inheritDoc
*/
constructor(options: Partial<Options<MongoDriver, EM, Entities>>);
/**
* Gets the Migrator.
*/
get migrator(): IMigrator;
export declare class MongoMikroORM<
EM extends MongoEntityManager = MongoEntityManager,
Entities extends (string | EntityClass<AnyEntity> | EntitySchema)[] = (
| string
| EntityClass<AnyEntity>
| EntitySchema
)[],
> extends MikroORM<MongoDriver, EM, Entities> {
/**
* @inheritDoc
*/
static init<
D extends IDatabaseDriver = MongoDriver,
EM extends EntityManager<D> = D[typeof EntityManagerType] & EntityManager<D>,
Entities extends (string | EntityClass<AnyEntity> | EntitySchema)[] = (
| string
| EntityClass<AnyEntity>
| EntitySchema
)[],
>(options: Partial<Options<D, EM, Entities>>): Promise<MikroORM<D, EM, Entities>>;
/**
* @inheritDoc
*/
constructor(options: Partial<Options<MongoDriver, EM, Entities>>);
/**
* Gets the Migrator.
*/
get migrator(): IMigrator;
}

@@ -1,6 +0,6 @@

import { defineConfig, MikroORM, } from '@mikro-orm/core';
import { defineConfig, MikroORM } from '@mikro-orm/core';
import { MongoDriver } from './MongoDriver.js';
/** Creates a type-safe configuration object for the MongoDB driver. */
export function defineMongoConfig(options) {
return defineConfig({ driver: MongoDriver, ...options });
return defineConfig({ driver: MongoDriver, ...options });
}

@@ -11,22 +11,22 @@ /**

export class MongoMikroORM extends MikroORM {
/**
* @inheritDoc
*/
static async init(options) {
return super.init(defineMongoConfig(options));
}
/**
* @inheritDoc
*/
constructor(options) {
super(defineMongoConfig(options));
}
/**
* Gets the Migrator.
*/
get migrator() {
return this.driver
.getPlatform()
.getExtension('Migrator', '@mikro-orm/migrator', '@mikro-orm/migrations-mongodb', this.em);
}
/**
* @inheritDoc
*/
static async init(options) {
return super.init(defineMongoConfig(options));
}
/**
* @inheritDoc
*/
constructor(options) {
super(defineMongoConfig(options));
}
/**
* Gets the Migrator.
*/
get migrator() {
return this.driver
.getPlatform()
.getExtension('Migrator', '@mikro-orm/migrator', '@mikro-orm/migrations-mongodb', this.em);
}
}
import { ObjectId } from 'mongodb';
import { Platform, type IPrimaryKey, type Primary, type NamingStrategy, type Constructor, type EntityRepository, type EntityProperty, type PopulateOptions, type EntityMetadata, type IDatabaseDriver, type EntityManager, type Configuration, type MikroORM, type TransformContext } from '@mikro-orm/core';
import {
Platform,
type IPrimaryKey,
type Primary,
type NamingStrategy,
type Constructor,
type EntityRepository,
type EntityProperty,
type PopulateOptions,
type EntityMetadata,
type IDatabaseDriver,
type EntityManager,
type Configuration,
type MikroORM,
type TransformContext,
} from '@mikro-orm/core';
import { MongoExceptionConverter } from './MongoExceptionConverter.js';

@@ -7,25 +22,25 @@ import { MongoSchemaGenerator } from './MongoSchemaGenerator.js';

export declare class MongoPlatform extends Platform {
protected readonly exceptionConverter: MongoExceptionConverter;
setConfig(config: Configuration): void;
getNamingStrategy(): {
new (): NamingStrategy;
};
getRepositoryClass<T extends object>(): Constructor<EntityRepository<T>>;
/** @inheritDoc */
lookupExtensions(orm: MikroORM): void;
/** @inheritDoc */
getExtension<T>(extensionName: string, extensionKey: string, moduleName: string, em: EntityManager): T;
getSchemaGenerator(driver: IDatabaseDriver, em?: EntityManager): MongoSchemaGenerator;
normalizePrimaryKey<T extends number | string = number | string>(data: Primary<T> | IPrimaryKey | ObjectId): T;
denormalizePrimaryKey(data: number | string): IPrimaryKey;
usesImplicitTransactions(): boolean;
convertsJsonAutomatically(): boolean;
convertJsonToDatabaseValue(value: unknown): unknown;
convertJsonToJSValue(value: unknown, context?: TransformContext): unknown;
marshallArray(values: string[]): string;
cloneEmbeddable<T>(data: T): T;
shouldHaveColumn<T>(prop: EntityProperty<T>, populate: PopulateOptions<T>[], exclude?: string[]): boolean;
validateMetadata(meta: EntityMetadata): void;
isAllowedTopLevelOperator(operator: string): boolean;
getDefaultClientUrl(): string;
protected readonly exceptionConverter: MongoExceptionConverter;
setConfig(config: Configuration): void;
getNamingStrategy(): {
new (): NamingStrategy;
};
getRepositoryClass<T extends object>(): Constructor<EntityRepository<T>>;
/** @inheritDoc */
lookupExtensions(orm: MikroORM): void;
/** @inheritDoc */
getExtension<T>(extensionName: string, extensionKey: string, moduleName: string, em: EntityManager): T;
getSchemaGenerator(driver: IDatabaseDriver, em?: EntityManager): MongoSchemaGenerator;
normalizePrimaryKey<T extends number | string = number | string>(data: Primary<T> | IPrimaryKey | ObjectId): T;
denormalizePrimaryKey(data: number | string): IPrimaryKey;
usesImplicitTransactions(): boolean;
convertsJsonAutomatically(): boolean;
convertJsonToDatabaseValue(value: unknown): unknown;
convertJsonToJSValue(value: unknown, context?: TransformContext): unknown;
marshallArray(values: string[]): string;
cloneEmbeddable<T>(data: T): T;
shouldHaveColumn<T>(prop: EntityProperty<T>, populate: PopulateOptions<T>[], exclude?: string[]): boolean;
validateMetadata(meta: EntityMetadata): void;
isAllowedTopLevelOperator(operator: string): boolean;
getDefaultClientUrl(): string;
}
import { ObjectId } from 'mongodb';
import { Platform, MongoNamingStrategy, Utils, ReferenceKind, MetadataError, } from '@mikro-orm/core';
import { Platform, MongoNamingStrategy, Utils, ReferenceKind, MetadataError } from '@mikro-orm/core';
import { MongoExceptionConverter } from './MongoExceptionConverter.js';

@@ -8,84 +8,84 @@ import { MongoEntityRepository } from './MongoEntityRepository.js';

export class MongoPlatform extends Platform {
exceptionConverter = new MongoExceptionConverter();
setConfig(config) {
config.set('autoJoinOneToOneOwner', false);
config.set('loadStrategy', 'select-in');
config.get('discovery').inferDefaultValues = false;
super.setConfig(config);
exceptionConverter = new MongoExceptionConverter();
setConfig(config) {
config.set('autoJoinOneToOneOwner', false);
config.set('loadStrategy', 'select-in');
config.get('discovery').inferDefaultValues = false;
super.setConfig(config);
}
getNamingStrategy() {
return MongoNamingStrategy;
}
getRepositoryClass() {
return MongoEntityRepository;
}
/** @inheritDoc */
lookupExtensions(orm) {
MongoSchemaGenerator.register(orm);
}
/** @inheritDoc */
getExtension(extensionName, extensionKey, moduleName, em) {
if (extensionName === 'EntityGenerator') {
throw new Error('EntityGenerator is not supported for this driver.');
}
getNamingStrategy() {
return MongoNamingStrategy;
if (extensionName === 'Migrator') {
return super.getExtension('Migrator', '@mikro-orm/migrator', '@mikro-orm/migrations-mongodb', em);
}
getRepositoryClass() {
return MongoEntityRepository;
/* v8 ignore next */
return super.getExtension(extensionName, extensionKey, moduleName, em);
}
/* v8 ignore next: kept for type inference only */
getSchemaGenerator(driver, em) {
return new MongoSchemaGenerator(em ?? driver);
}
normalizePrimaryKey(data) {
if (Utils.isObject(data) && data.constructor?.name === 'ObjectId') {
return data.toHexString();
}
/** @inheritDoc */
lookupExtensions(orm) {
MongoSchemaGenerator.register(orm);
return data;
}
denormalizePrimaryKey(data) {
return new ObjectId('' + data);
}
usesImplicitTransactions() {
return false;
}
convertsJsonAutomatically() {
return true;
}
convertJsonToDatabaseValue(value) {
return Utils.copy(value);
}
convertJsonToJSValue(value, context) {
return value;
}
marshallArray(values) {
return values;
}
cloneEmbeddable(data) {
const ret = super.cloneEmbeddable(data);
Utils.dropUndefinedProperties(ret);
return ret;
}
shouldHaveColumn(prop, populate, exclude) {
if (super.shouldHaveColumn(prop, populate, exclude)) {
return true;
}
/** @inheritDoc */
getExtension(extensionName, extensionKey, moduleName, em) {
if (extensionName === 'EntityGenerator') {
throw new Error('EntityGenerator is not supported for this driver.');
}
if (extensionName === 'Migrator') {
return super.getExtension('Migrator', '@mikro-orm/migrator', '@mikro-orm/migrations-mongodb', em);
}
/* v8 ignore next */
return super.getExtension(extensionName, extensionKey, moduleName, em);
return prop.kind === ReferenceKind.MANY_TO_MANY && prop.owner;
}
validateMetadata(meta) {
if (meta.inheritanceType === 'tpt') {
throw MetadataError.tptNotSupportedByDriver(meta);
}
/* v8 ignore next: kept for type inference only */
getSchemaGenerator(driver, em) {
return new MongoSchemaGenerator(em ?? driver);
const pk = meta.getPrimaryProps()[0];
if (pk && pk.fieldNames?.[0] !== '_id') {
throw MetadataError.invalidPrimaryKey(meta, pk, '_id');
}
normalizePrimaryKey(data) {
if (Utils.isObject(data) && data.constructor?.name === 'ObjectId') {
return data.toHexString();
}
return data;
}
denormalizePrimaryKey(data) {
return new ObjectId('' + data);
}
usesImplicitTransactions() {
return false;
}
convertsJsonAutomatically() {
return true;
}
convertJsonToDatabaseValue(value) {
return Utils.copy(value);
}
convertJsonToJSValue(value, context) {
return value;
}
marshallArray(values) {
return values;
}
cloneEmbeddable(data) {
const ret = super.cloneEmbeddable(data);
Utils.dropUndefinedProperties(ret);
return ret;
}
shouldHaveColumn(prop, populate, exclude) {
if (super.shouldHaveColumn(prop, populate, exclude)) {
return true;
}
return prop.kind === ReferenceKind.MANY_TO_MANY && prop.owner;
}
validateMetadata(meta) {
if (meta.inheritanceType === 'tpt') {
throw MetadataError.tptNotSupportedByDriver(meta);
}
const pk = meta.getPrimaryProps()[0];
if (pk && pk.fieldNames?.[0] !== '_id') {
throw MetadataError.invalidPrimaryKey(meta, pk, '_id');
}
}
isAllowedTopLevelOperator(operator) {
return ['$not', '$fulltext'].includes(operator);
}
getDefaultClientUrl() {
return 'mongodb://127.0.0.1:27017';
}
}
isAllowedTopLevelOperator(operator) {
return ['$not', '$fulltext'].includes(operator);
}
getDefaultClientUrl() {
return 'mongodb://127.0.0.1:27017';
}
}

@@ -6,33 +6,31 @@ import { type CreateSchemaOptions, type MikroORM } from '@mikro-orm/core';

export declare class MongoSchemaGenerator extends AbstractSchemaGenerator<MongoDriver> {
static register(orm: MikroORM): void;
create(options?: MongoCreateSchemaOptions): Promise<void>;
drop(options?: {
dropMigrationsTable?: boolean;
}): Promise<void>;
update(options?: MongoCreateSchemaOptions): Promise<void>;
ensureDatabase(): Promise<boolean>;
refresh(options?: MongoCreateSchemaOptions): Promise<void>;
dropIndexes(options?: {
skipIndexes?: {
collection: string;
indexName: string;
}[];
collectionsWithFailedIndexes?: string[];
}): Promise<void>;
ensureIndexes(options?: EnsureIndexesOptions): Promise<void>;
private mapIndexProperties;
private createIndexes;
private executeQuery;
private createUniqueIndexes;
private createPropertyIndexes;
static register(orm: MikroORM): void;
create(options?: MongoCreateSchemaOptions): Promise<void>;
drop(options?: { dropMigrationsTable?: boolean }): Promise<void>;
update(options?: MongoCreateSchemaOptions): Promise<void>;
ensureDatabase(): Promise<boolean>;
refresh(options?: MongoCreateSchemaOptions): Promise<void>;
dropIndexes(options?: {
skipIndexes?: {
collection: string;
indexName: string;
}[];
collectionsWithFailedIndexes?: string[];
}): Promise<void>;
ensureIndexes(options?: EnsureIndexesOptions): Promise<void>;
private mapIndexProperties;
private createIndexes;
private executeQuery;
private createUniqueIndexes;
private createPropertyIndexes;
}
export interface MongoCreateSchemaOptions extends CreateSchemaOptions {
/** create indexes? defaults to true */
ensureIndexes?: boolean;
/** create indexes? defaults to true */
ensureIndexes?: boolean;
}
/** Options for the `ensureIndexes()` method of `MongoSchemaGenerator`. */
export interface EnsureIndexesOptions {
ensureCollections?: boolean;
retry?: boolean | string[];
retryLimit?: number;
ensureCollections?: boolean;
retry?: boolean | string[];
retryLimit?: number;
}

@@ -1,224 +0,228 @@

import { Utils, inspect, } from '@mikro-orm/core';
import { Utils, inspect } from '@mikro-orm/core';
import { AbstractSchemaGenerator } from '@mikro-orm/core/schema';
/** Schema generator for MongoDB that manages collections and indexes. */
export class MongoSchemaGenerator extends AbstractSchemaGenerator {
static register(orm) {
orm.config.registerExtension('@mikro-orm/schema-generator', () => new MongoSchemaGenerator(orm.em));
static register(orm) {
orm.config.registerExtension('@mikro-orm/schema-generator', () => new MongoSchemaGenerator(orm.em));
}
async create(options = {}) {
await this.connection.ensureConnection();
options.ensureIndexes ??= true;
const existing = await this.connection.listCollections();
const metadata = this.getOrderedMetadata();
/* v8 ignore next */
const promises = metadata
.filter(meta => !existing.includes(meta.collection))
.map(meta =>
this.connection.createCollection(meta.class).catch(err => {
const existsErrorMessage = `Collection ${this.config.get('dbName')}.${meta.collection} already exists.`;
// ignore errors about the collection already existing
if (!(err.name === 'MongoServerError' && err.message.includes(existsErrorMessage))) {
throw err;
}
}),
);
if (options.ensureIndexes) {
await this.ensureIndexes({ ensureCollections: false });
}
async create(options = {}) {
await this.connection.ensureConnection();
options.ensureIndexes ??= true;
const existing = await this.connection.listCollections();
const metadata = this.getOrderedMetadata();
await Promise.all(promises);
}
async drop(options = {}) {
await this.connection.ensureConnection();
const existing = await this.connection.listCollections();
const metadata = this.getOrderedMetadata();
if (options.dropMigrationsTable) {
metadata.push({ collection: this.config.get('migrations').tableName });
}
const promises = metadata
.filter(meta => existing.includes(meta.collection))
.map(meta => this.connection.dropCollection(meta.class));
await Promise.all(promises);
}
async update(options = {}) {
await this.create(options);
}
async ensureDatabase() {
return false;
}
async refresh(options = {}) {
await this.ensureDatabase();
await this.drop();
await this.create(options);
}
async dropIndexes(options) {
await this.connection.ensureConnection();
const db = this.connection.getDb();
const collections = await db.listCollections().toArray();
const promises = [];
for (const collection of collections) {
if (options?.collectionsWithFailedIndexes && !options.collectionsWithFailedIndexes.includes(collection.name)) {
continue;
}
const indexes = await db.collection(collection.name).listIndexes().toArray();
for (const index of indexes) {
const isIdIndex = index.key._id === 1 && Utils.getObjectKeysSize(index.key) === 1;
/* v8 ignore next */
const promises = metadata
.filter(meta => !existing.includes(meta.collection))
.map(meta => this.connection.createCollection(meta.class).catch(err => {
const existsErrorMessage = `Collection ${this.config.get('dbName')}.${meta.collection} already exists.`;
// ignore errors about the collection already existing
if (!(err.name === 'MongoServerError' && err.message.includes(existsErrorMessage))) {
throw err;
}
}));
if (options.ensureIndexes) {
await this.ensureIndexes({ ensureCollections: false });
if (
!isIdIndex &&
!options?.skipIndexes?.find(idx => idx.collection === collection.name && idx.indexName === index.name)
) {
promises.push(this.executeQuery(db.collection(collection.name), 'dropIndex', index.name));
}
await Promise.all(promises);
}
}
async drop(options = {}) {
await this.connection.ensureConnection();
const existing = await this.connection.listCollections();
const metadata = this.getOrderedMetadata();
if (options.dropMigrationsTable) {
metadata.push({ collection: this.config.get('migrations').tableName });
}
const promises = metadata
.filter(meta => existing.includes(meta.collection))
.map(meta => this.connection.dropCollection(meta.class));
await Promise.all(promises);
await Promise.all(promises);
}
async ensureIndexes(options = {}) {
await this.connection.ensureConnection();
options.ensureCollections ??= true;
options.retryLimit ??= 3;
if (options.ensureCollections) {
await this.create({ ensureIndexes: false });
}
async update(options = {}) {
await this.create(options);
const promises = [];
for (const meta of this.getOrderedMetadata()) {
if (Array.isArray(options?.retry) && !options.retry.includes(meta.collection)) {
continue;
}
promises.push(...this.createIndexes(meta));
promises.push(...this.createUniqueIndexes(meta));
for (const prop of meta.props) {
promises.push(...this.createPropertyIndexes(meta, prop, 'index'));
promises.push(...this.createPropertyIndexes(meta, prop, 'unique'));
}
}
async ensureDatabase() {
return false;
}
async refresh(options = {}) {
await this.ensureDatabase();
await this.drop();
await this.create(options);
}
async dropIndexes(options) {
await this.connection.ensureConnection();
const db = this.connection.getDb();
const collections = await db.listCollections().toArray();
const promises = [];
for (const collection of collections) {
if (options?.collectionsWithFailedIndexes && !options.collectionsWithFailedIndexes.includes(collection.name)) {
continue;
}
const indexes = await db.collection(collection.name).listIndexes().toArray();
for (const index of indexes) {
const isIdIndex = index.key._id === 1 && Utils.getObjectKeysSize(index.key) === 1;
/* v8 ignore next */
if (!isIdIndex &&
!options?.skipIndexes?.find(idx => idx.collection === collection.name && idx.indexName === index.name)) {
promises.push(this.executeQuery(db.collection(collection.name), 'dropIndex', index.name));
}
}
const res = await Promise.allSettled(promises.map(p => p[1]));
if (res.some(r => r.status === 'rejected') && options.retry !== false) {
const skipIndexes = [];
const collectionsWithFailedIndexes = [];
const errors = [];
for (let i = 0; i < res.length; i++) {
const r = res[i];
if (r.status === 'rejected') {
collectionsWithFailedIndexes.push(promises[i][0]);
errors.push(r.reason);
} else {
skipIndexes.push({ collection: promises[i][0], indexName: r.value });
}
await Promise.all(promises);
}
await this.dropIndexes({ skipIndexes, collectionsWithFailedIndexes });
if (options.retryLimit === 0) {
const details = errors.map(e => e.message).join('\n');
const message = `Failed to create indexes on the following collections: ${collectionsWithFailedIndexes.join(', ')}\n${details}`;
throw new Error(message, { cause: errors });
}
await this.ensureIndexes({
retry: collectionsWithFailedIndexes,
retryLimit: options.retryLimit - 1,
});
}
async ensureIndexes(options = {}) {
await this.connection.ensureConnection();
options.ensureCollections ??= true;
options.retryLimit ??= 3;
if (options.ensureCollections) {
await this.create({ ensureIndexes: false });
}
mapIndexProperties(index, meta) {
return Utils.flatten(
Utils.asArray(index.properties).map(propName => {
const rootPropName = propName.split('.')[0];
const prop = meta.properties[rootPropName];
if (propName.includes('.')) {
return [prop.fieldNames[0] + propName.substring(propName.indexOf('.'))];
}
const promises = [];
for (const meta of this.getOrderedMetadata()) {
if (Array.isArray(options?.retry) && !options.retry.includes(meta.collection)) {
continue;
}
promises.push(...this.createIndexes(meta));
promises.push(...this.createUniqueIndexes(meta));
for (const prop of meta.props) {
promises.push(...this.createPropertyIndexes(meta, prop, 'index'));
promises.push(...this.createPropertyIndexes(meta, prop, 'unique'));
}
return prop?.fieldNames ?? propName;
}),
);
}
createIndexes(meta) {
const res = [];
meta.indexes.forEach(index => {
let fieldOrSpec;
const properties = this.mapIndexProperties(index, meta);
const collection = this.connection.getCollection(meta.class);
if (Array.isArray(index.options) && index.options.length === 2 && properties.length === 0) {
res.push([collection.collectionName, collection.createIndex(index.options[0], index.options[1])]);
return;
}
if (index.options && properties.length === 0) {
res.push([collection.collectionName, collection.createIndex(index.options)]);
return;
}
if (index.type) {
if (index.type === 'fulltext') {
index.type = 'text';
}
const res = await Promise.allSettled(promises.map(p => p[1]));
if (res.some(r => r.status === 'rejected') && options.retry !== false) {
const skipIndexes = [];
const collectionsWithFailedIndexes = [];
const errors = [];
for (let i = 0; i < res.length; i++) {
const r = res[i];
if (r.status === 'rejected') {
collectionsWithFailedIndexes.push(promises[i][0]);
errors.push(r.reason);
}
else {
skipIndexes.push({ collection: promises[i][0], indexName: r.value });
}
}
await this.dropIndexes({ skipIndexes, collectionsWithFailedIndexes });
if (options.retryLimit === 0) {
const details = errors.map(e => e.message).join('\n');
const message = `Failed to create indexes on the following collections: ${collectionsWithFailedIndexes.join(', ')}\n${details}`;
throw new Error(message, { cause: errors });
}
await this.ensureIndexes({
retry: collectionsWithFailedIndexes,
retryLimit: options.retryLimit - 1,
});
}
const spec = {};
properties.forEach(prop => (spec[prop] = index.type));
fieldOrSpec = spec;
} else {
fieldOrSpec = properties.reduce((o, i) => {
o[i] = 1;
return o;
}, {});
}
// MongoDB uses 'hidden' for invisible indexes
const indexOptions = {
name: index.name,
unique: false,
...index.options,
};
if (index.invisible) {
indexOptions.hidden = true;
}
res.push([collection.collectionName, this.executeQuery(collection, 'createIndex', fieldOrSpec, indexOptions)]);
});
return res;
}
async executeQuery(collection, method, ...args) {
const now = Date.now();
return collection[method](...args).then(res => {
Utils.dropUndefinedProperties(args);
const query = `db.getCollection('${collection.collectionName}').${method}(${args.map(arg => inspect(arg)).join(', ')});`;
this.config.getLogger().logQuery({
level: 'info',
query,
took: Date.now() - now,
});
return res;
});
}
createUniqueIndexes(meta) {
const res = [];
meta.uniques.forEach(index => {
const properties = this.mapIndexProperties(index, meta);
const fieldOrSpec = properties.reduce((o, i) => {
o[i] = 1;
return o;
}, {});
const collection = this.connection.getCollection(meta.class);
res.push([
collection.collectionName,
this.executeQuery(collection, 'createIndex', fieldOrSpec, {
name: index.name,
unique: true,
...index.options,
}),
]);
});
return res;
}
createPropertyIndexes(meta, prop, type) {
if (!prop[type] || !meta.collection) {
return [];
}
mapIndexProperties(index, meta) {
return Utils.flatten(Utils.asArray(index.properties).map(propName => {
const rootPropName = propName.split('.')[0];
const prop = meta.properties[rootPropName];
if (propName.includes('.')) {
return [prop.fieldNames[0] + propName.substring(propName.indexOf('.'))];
}
return prop?.fieldNames ?? propName;
}));
}
createIndexes(meta) {
const res = [];
meta.indexes.forEach(index => {
let fieldOrSpec;
const properties = this.mapIndexProperties(index, meta);
const collection = this.connection.getCollection(meta.class);
if (Array.isArray(index.options) && index.options.length === 2 && properties.length === 0) {
res.push([collection.collectionName, collection.createIndex(index.options[0], index.options[1])]);
return;
}
if (index.options && properties.length === 0) {
res.push([collection.collectionName, collection.createIndex(index.options)]);
return;
}
if (index.type) {
if (index.type === 'fulltext') {
index.type = 'text';
}
const spec = {};
properties.forEach(prop => (spec[prop] = index.type));
fieldOrSpec = spec;
}
else {
fieldOrSpec = properties.reduce((o, i) => {
o[i] = 1;
return o;
}, {});
}
// MongoDB uses 'hidden' for invisible indexes
const indexOptions = {
name: index.name,
unique: false,
...index.options,
};
if (index.invisible) {
indexOptions.hidden = true;
}
res.push([collection.collectionName, this.executeQuery(collection, 'createIndex', fieldOrSpec, indexOptions)]);
});
return res;
}
async executeQuery(collection, method, ...args) {
const now = Date.now();
return collection[method](...args).then((res) => {
Utils.dropUndefinedProperties(args);
const query = `db.getCollection('${collection.collectionName}').${method}(${args.map(arg => inspect(arg)).join(', ')});`;
this.config.getLogger().logQuery({
level: 'info',
query,
took: Date.now() - now,
});
return res;
});
}
createUniqueIndexes(meta) {
const res = [];
meta.uniques.forEach(index => {
const properties = this.mapIndexProperties(index, meta);
const fieldOrSpec = properties.reduce((o, i) => {
o[i] = 1;
return o;
}, {});
const collection = this.connection.getCollection(meta.class);
res.push([
collection.collectionName,
this.executeQuery(collection, 'createIndex', fieldOrSpec, {
name: index.name,
unique: true,
...index.options,
}),
]);
});
return res;
}
createPropertyIndexes(meta, prop, type) {
if (!prop[type] || !meta.collection) {
return [];
}
const collection = this.connection.getCollection(meta.class);
const fieldOrSpec = prop.embeddedPath
? prop.embeddedPath.join('.')
: prop.fieldNames.reduce((o, i) => {
o[i] = 1;
return o;
}, {});
return [
[
collection.collectionName,
this.executeQuery(collection, 'createIndex', fieldOrSpec, {
name: typeof prop[type] === 'string' ? prop[type] : undefined,
unique: type === 'unique',
sparse: prop.nullable === true,
}),
],
];
}
const collection = this.connection.getCollection(meta.class);
const fieldOrSpec = prop.embeddedPath
? prop.embeddedPath.join('.')
: prop.fieldNames.reduce((o, i) => {
o[i] = 1;
return o;
}, {});
return [
[
collection.collectionName,
this.executeQuery(collection, 'createIndex', fieldOrSpec, {
name: typeof prop[type] === 'string' ? prop[type] : undefined,
unique: type === 'unique',
sparse: prop.nullable === true,
}),
],
];
}
}
{
"name": "@mikro-orm/mongodb",
"version": "7.0.3-dev.21",
"version": "7.0.3",
"description": "TypeScript ORM for Node.js based on Data Mapper, Unit of Work and Identity Map patterns. Supports MongoDB, MySQL, PostgreSQL and SQLite databases as well as usage with vanilla JavaScript.",

@@ -53,6 +53,6 @@ "keywords": [

"devDependencies": {
"@mikro-orm/core": "^7.0.2"
"@mikro-orm/core": "^7.0.3"
},
"peerDependencies": {
"@mikro-orm/core": "7.0.3-dev.21"
"@mikro-orm/core": "7.0.3"
},

@@ -59,0 +59,0 @@ "engines": {

@@ -136,3 +136,3 @@ <h1 align="center">

author.name = 'Jon Snow II';
author.books.getItems().forEach(book => book.title += ' (2nd ed.)');
author.books.getItems().forEach(book => (book.title += ' (2nd ed.)'));
author.books.add(orm.em.create(Book, { title: 'New Book', author }));

@@ -139,0 +139,0 @@