@naturalcycles/db-lib - npm Package Compare versions

Comparing version 9.13.0 to 9.14.0
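Read together, the hunks below have one theme: every hand-rolled NDJSON stream pipeline (createReadStream, optional unzip, transformSplit, transformJsonParse) is collapsed into fs2 helpers from @naturalcycles/nodejs-lib. A minimal before/after sketch of the read path — the Row type and function wrappers are illustrative, not part of the package:

import fs from 'node:fs'
import {
  _pipeline,
  fs2,
  transformJsonParse,
  transformSplit,
  writablePushToArray,
} from '@naturalcycles/nodejs-lib'

interface Row { id: string } // illustrative row shape

// 9.13.0 style: assemble the NDJSON read pipeline by hand
async function loadRowsOld(filePath: string): Promise<Row[]> {
  const rows: Row[] = []
  await _pipeline([
    fs.createReadStream(filePath),
    transformSplit(), // splits by \n
    transformJsonParse(),
    writablePushToArray(rows),
  ])
  return rows
}

// 9.14.0 style: one helper replaces the whole pipeline
// (it evidently also covers .gz input, since the explicit
// createUnzip() stages disappear everywhere below)
async function loadRowsNew(filePath: string): Promise<Row[]> {
  return await fs2.createReadStreamAsNDJSON(filePath).toArray()
}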

dist/adapter/file/localFile.persistence.plugin.js

 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.LocalFilePersistencePlugin = void 0;
 const tslib_1 = require("tslib");
-const node_fs_1 = tslib_1.__importDefault(require("node:fs"));
-const promises_1 = tslib_1.__importDefault(require("node:fs/promises"));
 const node_stream_1 = require("node:stream");
-const node_zlib_1 = require("node:zlib");
 const js_lib_1 = require("@naturalcycles/js-lib");
@@ -24,3 +20,3 @@ const nodejs_lib_1 = require("@naturalcycles/nodejs-lib");
     async getTables() {
-        return (await promises_1.default.readdir(this.cfg.storagePath))
+        return (await nodejs_lib_1.fs2.readdirAsync(this.cfg.storagePath))
             .filter(f => f.includes('.ndjson'))
@@ -35,12 +31,3 @@ .map(f => f.split('.ndjson')[0]);
             return [];
-        const transformUnzip = this.cfg.gzip ? [(0, node_zlib_1.createUnzip)()] : [];
-        const rows = [];
-        await (0, nodejs_lib_1._pipeline)([
-            node_fs_1.default.createReadStream(filePath),
-            ...transformUnzip,
-            (0, nodejs_lib_1.transformSplit)(), // splits by \n
-            (0, nodejs_lib_1.transformJsonParse)(),
-            (0, nodejs_lib_1.writablePushToArray)(rows),
-        ]);
-        return rows;
+        return await nodejs_lib_1.fs2.createReadStreamAsNDJSON(filePath).toArray();
     }
@@ -54,11 +41,5 @@ async saveFiles(ops) {
         const filePath = `${this.cfg.storagePath}/${table}.${ext}`;
-        const transformZip = this.cfg.gzip ? [(0, node_zlib_1.createGzip)()] : [];
-        await (0, nodejs_lib_1._pipeline)([
-            node_stream_1.Readable.from(rows),
-            (0, nodejs_lib_1.transformToNDJson)(),
-            ...transformZip,
-            node_fs_1.default.createWriteStream(filePath),
-        ]);
+        await (0, nodejs_lib_1._pipeline)([node_stream_1.Readable.from(rows), ...nodejs_lib_1.fs2.createWriteStreamAsNDJSON(filePath)]);
     }
 }
 exports.LocalFilePersistencePlugin = LocalFilePersistencePlugin;
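The write side of the plugin changes the same way. Note the spread: fs2.createWriteStreamAsNDJSON(path) apparently returns an array of pipeline stages (NDJSON stringify, optional gzip, file write) rather than a single stream. A sketch of the new saveFiles() write path with the surrounding class stripped away:

import { Readable } from 'node:stream'
import { _pipeline, fs2 } from '@naturalcycles/nodejs-lib'

// Standalone version of the new write path. The helper is spread
// into _pipeline, so it must return an array of stream stages,
// not a single Writable (inference from the diff, not a documented
// signature).
async function saveRows(filePath: string, rows: object[]): Promise<void> {
  await _pipeline([Readable.from(rows), ...fs2.createWriteStreamAsNDJSON(filePath)])
}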

dist/adapter/inmemory/inMemory.db.d.ts

@@ -32,3 +32,3 @@ import { JsonSchemaObject, StringMap, JsonSchemaRootObject, ObjectWithId, CommonLogger } from '@naturalcycles/js-lib';
 /**
- * @default ./tmp/inmemorydb
+ * @default ./tmp/inmemorydb.ndjson.gz
  *
@@ -35,0 +35,0 @@ * Will store one ndjson file per table.

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.InMemoryDBTransaction = exports.InMemoryDB = void 0;
const tslib_1 = require("tslib");
const node_fs_1 = tslib_1.__importDefault(require("node:fs"));
const promises_1 = tslib_1.__importDefault(require("node:fs/promises"));
const node_stream_1 = require("node:stream");
const node_zlib_1 = require("node:zlib");
const js_lib_1 = require("@naturalcycles/js-lib");

@@ -172,3 +168,2 @@ const nodejs_lib_1 = require("@naturalcycles/nodejs-lib");

await nodejs_lib_1.fs2.emptyDirAsync(persistentStoragePath);
const transformZip = persistZip ? [(0, node_zlib_1.createGzip)()] : [];
let tables = 0;

@@ -182,8 +177,3 @@ // infinite concurrency for now

const fname = `${persistentStoragePath}/${table}.ndjson${persistZip ? '.gz' : ''}`;
await (0, nodejs_lib_1._pipeline)([
node_stream_1.Readable.from(rows),
(0, nodejs_lib_1.transformToNDJson)(),
...transformZip,
node_fs_1.default.createWriteStream(fname),
]);
await (0, nodejs_lib_1._pipeline)([node_stream_1.Readable.from(rows), ...nodejs_lib_1.fs2.createWriteStreamAsNDJSON(fname)]);
});

@@ -201,3 +191,3 @@ this.cfg.logger.log(`flushToDisk took ${(0, nodejs_lib_1.dimGrey)((0, js_lib_1._since)(started))} to save ${(0, nodejs_lib_1.yellow)(tables)} tables`);

this.data = {}; // empty it in the beginning!
const files = (await promises_1.default.readdir(persistentStoragePath)).filter(f => f.includes('.ndjson'));
const files = (await nodejs_lib_1.fs2.readdirAsync(persistentStoragePath)).filter(f => f.includes('.ndjson'));
// infinite concurrency for now

@@ -207,11 +197,3 @@ await (0, js_lib_1.pMap)(files, async (file) => {

const table = file.split('.ndjson')[0];
const transformUnzip = file.endsWith('.gz') ? [(0, node_zlib_1.createUnzip)()] : [];
const rows = [];
await (0, nodejs_lib_1._pipeline)([
node_fs_1.default.createReadStream(fname),
...transformUnzip,
(0, nodejs_lib_1.transformSplit)(), // splits by \n
(0, nodejs_lib_1.transformJsonParse)(),
(0, nodejs_lib_1.writablePushToArray)(rows),
]);
const rows = await nodejs_lib_1.fs2.createReadStreamAsNDJSON(fname).toArray();
this.data[table] = (0, js_lib_1._by)(rows, r => r.id);

@@ -218,0 +200,0 @@ });
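restoreFromDisk() keeps its shape — list the table files, load each one, index rows by id — only the loading collapses into the fs2 helper. A standalone sketch of that loop; storagePath and the loose types are illustrative:

import { _by, pMap } from '@naturalcycles/js-lib'
import { fs2 } from '@naturalcycles/nodejs-lib'

// One `${table}.ndjson[.gz]` file per table, loaded back into
// an in-memory map keyed by row id (mirrors the hunks above).
async function restoreFromDisk(storagePath: string): Promise<Record<string, Record<string, any>>> {
  const data: Record<string, Record<string, any>> = {}
  const files = (await fs2.readdirAsync(storagePath)).filter(f => f.includes('.ndjson'))
  await pMap(files, async file => {
    const table = file.split('.ndjson')[0]!
    const rows = await fs2.createReadStreamAsNDJSON(`${storagePath}/${file}`).toArray()
    data[table] = _by(rows, r => r.id)
  })
  return data
}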

@@ -426,4 +426,3 @@ "use strict";
         await (0, nodejs_lib_1._pipeline)([
-            this.cfg.db.streamQuery(q.select(['id']), opt),
-            (0, nodejs_lib_1.transformMapSimple)(r => {
+            this.cfg.db.streamQuery(q.select(['id']), opt).map(r => {
                 count++;
@@ -786,9 +785,7 @@ return r.id;
         await (0, nodejs_lib_1._pipeline)([
-            this.cfg.db.streamQuery(q.select(['id']), opt),
-            (0, nodejs_lib_1.transformMapSimple)(r => r.id, {
-                errorMode: js_lib_1.ErrorMode.SUPPRESS,
-            }),
+            this.cfg.db.streamQuery(q.select(['id']), opt).map(r => r.id),
             (0, nodejs_lib_1.transformChunk)({ chunkSize }),
             (0, nodejs_lib_1.transformMap)(async (ids) => {
-                deleted += await this.cfg.db.deleteByQuery(dbQuery_1.DBQuery.create(q.table).filterIn('id', ids), opt);
+                await this.cfg.db.deleteByIds(q.table, ids, opt);
+                deleted += ids.length;
             }, {
@@ -795,0 +792,0 @@ predicate: js_lib_1._passthroughPredicate,
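Two things change in this pair of hunks: the stream returned by streamQuery() is now mapped in place with .map() instead of a transformMapSimple() stage, and each id chunk is deleted via deleteByIds() rather than a filterIn deleteByQuery(). A minimal sketch of the new shape — db, q, opt and chunkSize stand in for CommonDao internals and are typed loosely here:

import {
  _pipeline,
  transformChunk,
  transformMap,
  writableVoid,
} from '@naturalcycles/nodejs-lib'

// Streaming delete: stream ids, chunk them, delete chunk by chunk.
async function streamingDeleteByQuery(
  db: any,
  q: any,
  opt: any,
  chunkSize: number,
): Promise<number> {
  let deleted = 0
  await _pipeline([
    db.streamQuery(q.select(['id']), opt).map((r: { id: string }) => r.id),
    transformChunk<string>({ chunkSize }),
    transformMap<string[], void>(async ids => {
      await db.deleteByIds(q.table, ids, opt) // replaces the per-chunk deleteByQuery
      deleted += ids.length
    }),
    writableVoid(), // assumed terminal sink, as in the import context above
  ])
  return deleted
}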

@@ -1,3 +0,1 @@
-/// <reference types="node" />
-import { ZlibOptions } from 'node:zlib';
 import { AsyncMapper, ErrorMode, UnixTimestampNumber, StringMap } from '@naturalcycles/js-lib';
@@ -68,4 +66,4 @@ import { NDJsonStats, TransformLogProgressOptions, TransformMapOptions } from '@naturalcycles/nodejs-lib';
      * Only applicable if `gzip` is enabled
+     * Currently not available.
      */
-    zlibOptions?: ZlibOptions;
     /**
@@ -100,6 +98,2 @@ * Optionally you can provide mapper that is going to run for each table.
     emitSchemaFromDB?: boolean;
-    /**
-     * @default false
-     */
-    sortObjects?: boolean;
 }
@@ -106,0 +100,0 @@ /**

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.dbPipelineBackup = void 0;
const tslib_1 = require("tslib");
const node_fs_1 = tslib_1.__importDefault(require("node:fs"));
const promises_1 = tslib_1.__importDefault(require("node:fs/promises"));
const node_zlib_1 = require("node:zlib");
const js_lib_1 = require("@naturalcycles/js-lib");

@@ -21,4 +17,3 @@ const nodejs_lib_1 = require("@naturalcycles/nodejs-lib");

async function dbPipelineBackup(opt) {
const { db, concurrency = 16, limit = 0, outputDirPath, protectFromOverwrite = false, zlibOptions, mapperPerTable = {}, queryPerTable = {}, logEveryPerTable = {}, transformMapOptions, errorMode = js_lib_1.ErrorMode.SUPPRESS, emitSchemaFromDB = false, sortObjects = false, } = opt;
const strict = errorMode !== js_lib_1.ErrorMode.SUPPRESS;
const { db, concurrency = 16, limit = 0, outputDirPath, protectFromOverwrite = false, mapperPerTable = {}, queryPerTable = {}, logEveryPerTable = {}, transformMapOptions, errorMode = js_lib_1.ErrorMode.SUPPRESS, emitSchemaFromDB = false, } = opt;
const gzip = opt.gzip !== false; // default to true

@@ -78,7 +73,5 @@ let { tables } = opt;

}),
(0, nodejs_lib_1.transformToNDJson)({ strict, sortObjects }),
...(gzip ? [(0, node_zlib_1.createGzip)(zlibOptions)] : []), // optional gzip
node_fs_1.default.createWriteStream(filePath),
...nodejs_lib_1.fs2.createWriteStreamAsNDJSON(filePath),
]);
const { size: sizeBytes } = await promises_1.default.stat(filePath);
const { size: sizeBytes } = await nodejs_lib_1.fs2.statAsync(filePath);
const stats = nodejs_lib_1.NDJsonStats.create({

@@ -85,0 +78,0 @@ tookMillis: Date.now() - started,
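For callers, the visible API change is the options object: zlibOptions and sortObjects are gone (gzip is now handled inside fs2.createWriteStreamAsNDJSON, and the NDJSON stringify options went with transformToNDJson), while gzip itself still defaults to true. A hypothetical 9.14.0 call — the root import path and the loose db typing are assumptions:

import { dbPipelineBackup } from '@naturalcycles/db-lib' // assumed root export

// `db` is any CommonDB implementation, e.g. InMemoryDB from this package.
async function backupAll(db: any): Promise<void> {
  await dbPipelineBackup({
    db,
    outputDirPath: './backup', // illustrative path
    gzip: true, // still the default: `opt.gzip !== false`
    concurrency: 16, // matches the destructured default above
    // zlibOptions and sortObjects are no longer accepted in 9.14.0
  })
}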

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.dbPipelineRestore = void 0;
const tslib_1 = require("tslib");
const node_fs_1 = tslib_1.__importDefault(require("node:fs"));
const node_zlib_1 = require("node:zlib");
const js_lib_1 = require("@naturalcycles/js-lib");

@@ -18,3 +15,2 @@ const nodejs_lib_1 = require("@naturalcycles/nodejs-lib");

const { db, concurrency = 16, chunkSize = 100, limit, sinceUpdated, inputDirPath, mapperPerTable = {}, saveOptionsPerTable = {}, transformMapOptions, errorMode = js_lib_1.ErrorMode.SUPPRESS, recreateTables = false, } = opt;
const strict = errorMode !== js_lib_1.ErrorMode.SUPPRESS;
const onlyTables = opt.tables && new Set(opt.tables);

@@ -28,3 +24,3 @@ const sinceUpdatedStr = sinceUpdated ? ' since ' + (0, nodejs_lib_1.grey)((0, js_lib_1.localTime)(sinceUpdated).toPretty()) : '';

const tables = [];
node_fs_1.default.readdirSync(inputDirPath).forEach(f => {
nodejs_lib_1.fs2.readdir(inputDirPath).forEach(f => {
let table;

@@ -47,3 +43,3 @@ let gzip = false;

tablesToGzip.add(table);
sizeByTable[table] = node_fs_1.default.statSync(`${inputDirPath}/${f}`).size;
sizeByTable[table] = nodejs_lib_1.fs2.stat(`${inputDirPath}/${f}`).size;
});

@@ -56,3 +52,3 @@ const sizeStrByTable = (0, js_lib_1._mapValues)(sizeByTable, (_k, b) => (0, js_lib_1._hb)(b));

const schemaFilePath = `${inputDirPath}/${table}.schema.json`;
if (!node_fs_1.default.existsSync(schemaFilePath)) {
if (!nodejs_lib_1.fs2.pathExists(schemaFilePath)) {
console.warn(`${schemaFilePath} does not exist!`);

@@ -74,6 +70,3 @@ return;

await (0, nodejs_lib_1._pipeline)([
node_fs_1.default.createReadStream(filePath),
...(gzip ? [(0, node_zlib_1.createUnzip)()] : []),
(0, nodejs_lib_1.transformSplit)(), // splits by \n
(0, nodejs_lib_1.transformJsonParse)({ strict }),
nodejs_lib_1.fs2.createReadStreamAsNDJSON(filePath).take(limit || Number.POSITIVE_INFINITY),
(0, nodejs_lib_1.transformTap)(() => rows++),

@@ -85,3 +78,2 @@ (0, nodejs_lib_1.transformLogProgress)({

}),
(0, nodejs_lib_1.transformLimit)({ limit }),
...(sinceUpdated

@@ -88,0 +80,0 @@ ? [(0, nodejs_lib_1.transformFilterSync)(r => r.updated >= sinceUpdated)]
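The limit handling in restore also moves upstream: instead of a transformLimit({ limit }) stage late in the pipeline, the source stream itself is truncated with .take(limit || Number.POSITIVE_INFINITY). A reduced sketch of just that read path, counting rows into a void sink; the file path is illustrative:

import { _pipeline, fs2, transformTap, writableVoid } from '@naturalcycles/nodejs-lib'

// Read at most `limit` NDJSON rows (0 = unlimited), as in the new restore path.
async function countRows(filePath: string, limit = 0): Promise<number> {
  let rows = 0
  await _pipeline([
    fs2.createReadStreamAsNDJSON(filePath).take(limit || Number.POSITIVE_INFINITY),
    transformTap(() => rows++),
    writableVoid(), // assumed sink; the real pipeline continues with more stages
  ])
  return rows
}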

package.json

@@ -43,3 +43,3 @@ {
     },
-    "version": "9.13.0",
+    "version": "9.14.0",
     "description": "Lowest Common Denominator API to supported Databases",
@@ -46,0 +46,0 @@ "keywords": [

@@ -1,14 +0,4 @@
-import fs from 'node:fs'
-import fsp from 'node:fs/promises'
 import { Readable } from 'node:stream'
-import { createGzip, createUnzip } from 'node:zlib'
 import { ObjectWithId, pMap } from '@naturalcycles/js-lib'
-import {
-  transformJsonParse,
-  transformSplit,
-  transformToNDJson,
-  writablePushToArray,
-  _pipeline,
-  fs2,
-} from '@naturalcycles/nodejs-lib'
+import { _pipeline, fs2 } from '@naturalcycles/nodejs-lib'
 import { DBSaveBatchOperation } from '../../db.model'
@@ -46,3 +36,3 @@ import { FileDBPersistencePlugin } from './file.db.model'
   async getTables(): Promise<string[]> {
-    return (await fsp.readdir(this.cfg.storagePath))
+    return (await fs2.readdirAsync(this.cfg.storagePath))
       .filter(f => f.includes('.ndjson'))
@@ -59,15 +49,3 @@ .map(f => f.split('.ndjson')[0]!)
-    const transformUnzip = this.cfg.gzip ? [createUnzip()] : []
-    const rows: ROW[] = []
-    await _pipeline([
-      fs.createReadStream(filePath),
-      ...transformUnzip,
-      transformSplit(), // splits by \n
-      transformJsonParse(),
-      writablePushToArray(rows),
-    ])
-    return rows
+    return await fs2.createReadStreamAsNDJSON(filePath).toArray()
   }
@@ -83,11 +61,5 @@
     const filePath = `${this.cfg.storagePath}/${table}.${ext}`
-    const transformZip = this.cfg.gzip ? [createGzip()] : []
-    await _pipeline([
-      Readable.from(rows),
-      transformToNDJson(),
-      ...transformZip,
-      fs.createWriteStream(filePath),
-    ])
+    await _pipeline([Readable.from(rows), ...fs2.createWriteStreamAsNDJSON(filePath)])
   }
 }

@@ -1,5 +0,2 @@
-import fs from 'node:fs'
-import fsp from 'node:fs/promises'
 import { Readable } from 'node:stream'
-import { createGzip, createUnzip } from 'node:zlib'
 import {
@@ -23,6 +20,2 @@ generateJsonSchemaFromData,
   ReadableTyped,
-  transformJsonParse,
-  transformSplit,
-  transformToNDJson,
-  writablePushToArray,
   _pipeline,
@@ -82,3 +75,3 @@ dimGrey,
 /**
- * @default ./tmp/inmemorydb
+ * @default ./tmp/inmemorydb.ndjson.gz
  *
@@ -317,3 +310,2 @@ * Will store one ndjson file per table.
-    const transformZip = persistZip ? [createGzip()] : []
     let tables = 0
@@ -329,8 +321,3 @@
-      await _pipeline([
-        Readable.from(rows),
-        transformToNDJson(),
-        ...transformZip,
-        fs.createWriteStream(fname),
-      ])
+      await _pipeline([Readable.from(rows), ...fs2.createWriteStreamAsNDJSON(fname)])
     })
@@ -356,3 +343,3 @@
-    const files = (await fsp.readdir(persistentStoragePath)).filter(f => f.includes('.ndjson'))
+    const files = (await fs2.readdirAsync(persistentStoragePath)).filter(f => f.includes('.ndjson'))
@@ -364,14 +351,4 @@ // infinite concurrency for now
-      const transformUnzip = file.endsWith('.gz') ? [createUnzip()] : []
-      const rows: any[] = []
-      await _pipeline([
-        fs.createReadStream(fname),
-        ...transformUnzip,
-        transformSplit(), // splits by \n
-        transformJsonParse(),
-        writablePushToArray(rows),
-      ])
+      const rows = await fs2.createReadStreamAsNDJSON(fname).toArray()
       this.data[table] = _by(rows, r => r.id)
@@ -378,0 +355,0 @@ })

@@ -43,3 +43,2 @@ import { Transform } from 'node:stream'
   transformMap,
-  transformMapSimple,
   transformNoOp,
@@ -584,4 +583,3 @@ writableVoid,
     await _pipeline([
-      this.cfg.db.streamQuery<DBM>(q.select(['id']), opt),
-      transformMapSimple<DBM, string>(r => {
+      this.cfg.db.streamQuery<DBM>(q.select(['id']), opt).map(r => {
         count++
@@ -1027,13 +1025,8 @@ return r.id
     await _pipeline([
-      this.cfg.db.streamQuery<DBM>(q.select(['id']), opt),
-      transformMapSimple<ObjectWithId, string>(r => r.id, {
-        errorMode: ErrorMode.SUPPRESS,
-      }),
+      this.cfg.db.streamQuery<DBM>(q.select(['id']), opt).map(r => r.id),
       transformChunk<string>({ chunkSize }),
       transformMap<string[], void>(
         async ids => {
-          deleted += await this.cfg.db.deleteByQuery(
-            DBQuery.create(q.table).filterIn('id', ids),
-            opt,
-          )
+          await this.cfg.db.deleteByIds(q.table, ids, opt)
+          deleted += ids.length
         },
@@ -1040,0 +1033,0 @@ {

@@ -1,4 +0,1 @@
-import fs from 'node:fs'
-import fsp from 'node:fs/promises'
-import { createGzip, ZlibOptions } from 'node:zlib'
 import {
@@ -21,3 +18,2 @@ AppError,
   transformTap,
-  transformToNDJson,
   _pipeline,
@@ -105,4 +101,5 @@ boldWhite,
    * Only applicable if `gzip` is enabled
+   * Currently not available.
    */
-  zlibOptions?: ZlibOptions
+  // zlibOptions?: ZlibOptions
@@ -143,7 +140,2 @@ /**
   emitSchemaFromDB?: boolean
-  /**
-   * @default false
-   */
-  sortObjects?: boolean
 }
@@ -167,3 +159,2 @@
     protectFromOverwrite = false,
-    zlibOptions,
     mapperPerTable = {},
@@ -175,5 +166,3 @@ queryPerTable = {},
     emitSchemaFromDB = false,
-    sortObjects = false,
   } = opt
-  const strict = errorMode !== ErrorMode.SUPPRESS
   const gzip = opt.gzip !== false // default to true
@@ -251,8 +240,6 @@
       }),
-      transformToNDJson({ strict, sortObjects }),
-      ...(gzip ? [createGzip(zlibOptions)] : []), // optional gzip
-      fs.createWriteStream(filePath),
+      ...fs2.createWriteStreamAsNDJSON(filePath),
     ])
-    const { size: sizeBytes } = await fsp.stat(filePath)
+    const { size: sizeBytes } = await fs2.statAsync(filePath)
@@ -259,0 +246,0 @@ const stats = NDJsonStats.create({

@@ -1,3 +0,1 @@
-import fs from 'node:fs'
-import { createUnzip } from 'node:zlib'
 import {
@@ -18,4 +16,2 @@ AsyncMapper,
   transformFilterSync,
-  transformJsonParse,
-  transformLimit,
   transformLogProgress,
@@ -25,3 +21,2 @@ TransformLogProgressOptions,
   TransformMapOptions,
-  transformSplit,
   transformTap,
@@ -140,3 +135,2 @@ writableForEach,
   } = opt
-  const strict = errorMode !== ErrorMode.SUPPRESS
   const onlyTables = opt.tables && new Set(opt.tables)
@@ -156,3 +150,3 @@
   const tables: string[] = []
-  fs.readdirSync(inputDirPath).forEach(f => {
+  fs2.readdir(inputDirPath).forEach(f => {
     let table: string
@@ -174,3 +168,3 @@ let gzip = false
     if (gzip) tablesToGzip.add(table)
-    sizeByTable[table] = fs.statSync(`${inputDirPath}/${f}`).size
+    sizeByTable[table] = fs2.stat(`${inputDirPath}/${f}`).size
   })
@@ -187,3 +181,3 @@
     const schemaFilePath = `${inputDirPath}/${table}.schema.json`
-    if (!fs.existsSync(schemaFilePath)) {
+    if (!fs2.pathExists(schemaFilePath)) {
       console.warn(`${schemaFilePath} does not exist!`)
@@ -213,6 +207,3 @@ return
     await _pipeline([
-      fs.createReadStream(filePath),
-      ...(gzip ? [createUnzip()] : []),
-      transformSplit(), // splits by \n
-      transformJsonParse({ strict }),
+      fs2.createReadStreamAsNDJSON(filePath).take(limit || Number.POSITIVE_INFINITY),
       transformTap(() => rows++),
@@ -224,3 +215,2 @@ transformLogProgress({
       }),
-      transformLimit({ limit }),
       ...(sinceUpdated
@@ -227,0 +217,0 @@ ? [transformFilterSync<BaseDBEntity>(r => r.updated >= sinceUpdated)]
