@naturalcycles/db-lib
Comparing version 8.60.0 to 8.60.1
@@ -29,6 +29,6 @@ "use strict";
async loadFile(table) {
- await (0, nodejs_lib_1._ensureDir)(this.cfg.storagePath);
+ await nodejs_lib_1.fs2.ensureDirAsync(this.cfg.storagePath);
const ext = `ndjson${this.cfg.gzip ? '.gz' : ''}`;
const filePath = `${this.cfg.storagePath}/${table}.${ext}`;
- if (!(await (0, nodejs_lib_1._pathExists)(filePath)))
+ if (!(await nodejs_lib_1.fs2.pathExistsAsync(filePath)))
return [];
@@ -50,3 +50,3 @@ const transformUnzip = this.cfg.gzip ? [(0, node_zlib_1.createUnzip)()] : [];
async saveFile(table, rows) {
- await (0, nodejs_lib_1._ensureDir)(this.cfg.storagePath);
+ await nodejs_lib_1.fs2.ensureDirAsync(this.cfg.storagePath);
const ext = `ndjson${this.cfg.gzip ? '.gz' : ''}`;
@@ -53,0 +53,0 @@ const filePath = `${this.cfg.storagePath}/${table}.${ext}`;
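The two hunks above migrate the compiled file-based persistence from the standalone _ensureDir/_pathExists helpers to the fs2 namespace of @naturalcycles/nodejs-lib. A minimal TypeScript sketch of the same load pattern, not the package's actual class: the FilePersistenceCfg shape and loadFileSketch name are made up for illustration, and only the fs2.ensureDirAsync/fs2.pathExistsAsync calls come from this diff.

import { fs2 } from '@naturalcycles/nodejs-lib'

// Hypothetical config shape, for illustration only
interface FilePersistenceCfg {
  storagePath: string
  gzip: boolean
}

// Minimal sketch of the loadFile pattern after the fs2 migration
export async function loadFileSketch<ROW>(cfg: FilePersistenceCfg, table: string): Promise<ROW[]> {
  // Was: await _ensureDir(cfg.storagePath)
  await fs2.ensureDirAsync(cfg.storagePath)
  const ext = `ndjson${cfg.gzip ? '.gz' : ''}`
  const filePath = `${cfg.storagePath}/${table}.${ext}`
  // Was: if (!(await _pathExists(filePath))) return []
  if (!(await fs2.pathExistsAsync(filePath))) return []
  // ...stream-read and ndjson-parse filePath here, as the real implementation does
  return []
}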
@@ -170,3 +170,3 @@ "use strict";
const started = Date.now();
- await (0, nodejs_lib_1._emptyDir)(persistentStoragePath);
+ await nodejs_lib_1.fs2.emptyDirAsync(persistentStoragePath);
const transformZip = persistZip ? [(0, node_zlib_1.createGzip)()] : [];
@@ -197,3 +197,3 @@ let tables = 0;
const started = Date.now();
- await (0, nodejs_lib_1._ensureDir)(persistentStoragePath);
+ await nodejs_lib_1.fs2.ensureDirAsync(persistentStoragePath);
this.data = {}; // empty it in the beginning!
@@ -200,0 +200,0 @@ const files = (await promises_1.default.readdir(persistentStoragePath)).filter(f => f.includes('.ndjson'));
@@ -34,3 +34,3 @@ import { JsonSchemaObject, JsonSchemaRootObject, ObjectWithId } from '@naturalcycles/js-lib';
*/
- getByIds: <ROW extends ObjectWithId>(table: string, ids: ROW['id'][], opt?: CommonDBOptions) => Promise<ROW[]>;
+ getByIds: <ROW extends ObjectWithId>(table: string, ids: string[], opt?: CommonDBOptions) => Promise<ROW[]>;
/**
@@ -37,0 +37,0 @@ * Order by 'id' is not supported by all implementations (for example, Datastore doesn't support it).
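The typings hunk above narrows getByIds so the ids parameter is a plain string[] instead of ROW['id'][]. A hedged sketch of what callers see: the UserRow type and demoGetByIds helper are hypothetical, and it assumes ObjectWithId and CommonDBOptions are importable from the package roots as shown in the diff.

import { ObjectWithId } from '@naturalcycles/js-lib'
import { CommonDBOptions } from '@naturalcycles/db-lib'

// Hypothetical row type, for illustration only
interface UserRow extends ObjectWithId {
  email: string
}

// Shape of the narrowed signature, as it appears in the typings diff above
type GetByIds = <ROW extends ObjectWithId>(
  table: string,
  ids: string[], // was: ROW['id'][]
  opt?: CommonDBOptions,
) => Promise<ROW[]>

// Callers that already pass plain string ids are unaffected by the change
declare const getByIds: GetByIds
export async function demoGetByIds(): Promise<UserRow[]> {
  return await getByIds<UserRow>('users', ['id1', 'id2'])
}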
@@ -26,3 +26,3 @@ "use strict";
console.log(`>> ${(0, nodejs_lib_1.dimWhite)('dbPipelineBackup')} started in ${(0, nodejs_lib_1.grey)(outputDirPath)}...`);
- (0, nodejs_lib_1._ensureDirSync)(outputDirPath);
+ nodejs_lib_1.fs2.ensureDir(outputDirPath);
tables ||= await db.getTables();
@@ -50,3 +50,3 @@ console.log(`${(0, nodejs_lib_1.yellow)(tables.length)} ${(0, nodejs_lib_1.boldWhite)('table(s)')}:\n` + tables.join('\n'));
const schemaFilePath = `${outputDirPath}/${table}.schema.json`;
- if (protectFromOverwrite && (0, nodejs_lib_1._pathExistsSync)(filePath)) {
+ if (protectFromOverwrite && nodejs_lib_1.fs2.pathExists(filePath)) {
throw new js_lib_1.AppError(`dbPipelineBackup: output file exists: ${filePath}`);
@@ -56,7 +56,7 @@ }
let rows = 0;
- (0, nodejs_lib_1._ensureFileSync)(filePath);
+ nodejs_lib_1.fs2.ensureFile(filePath);
// console.log(`>> ${grey(filePath)} started...`)
if (emitSchemaFromDB) {
const schema = await db.getTableSchema(table);
- await (0, nodejs_lib_1._writeJson)(schemaFilePath, schema, { spaces: 2 });
+ await nodejs_lib_1.fs2.writeJsonAsync(schemaFilePath, schema, { spaces: 2 });
console.log(`>> ${(0, nodejs_lib_1.grey)(schemaFilePath)} saved (generated from DB)`);
@@ -63,0 +63,0 @@ }
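In dbPipelineBackup, the sync helpers _ensureDirSync, _pathExistsSync, _ensureFileSync and the async _writeJson become fs2.ensureDir, fs2.pathExists, fs2.ensureFile and fs2.writeJsonAsync. A compressed sketch of that preparation step, assuming a made-up BackupPrepInput shape and function name; only the fs2 calls and the AppError message mirror the diff.

import { AppError } from '@naturalcycles/js-lib'
import { fs2 } from '@naturalcycles/nodejs-lib'

// Hypothetical input shape, for illustration only
interface BackupPrepInput {
  outputDirPath: string
  table: string
  protectFromOverwrite: boolean
  schema: object
}

export async function prepareBackupFileSketch(input: BackupPrepInput): Promise<string> {
  const { outputDirPath, table, protectFromOverwrite, schema } = input
  // Sync variants keep their old semantics, just without the *Sync suffix
  fs2.ensureDir(outputDirPath) // was: _ensureDirSync
  const filePath = `${outputDirPath}/${table}.ndjson`
  const schemaFilePath = `${outputDirPath}/${table}.schema.json`
  if (protectFromOverwrite && fs2.pathExists(filePath)) {
    // was: _pathExistsSync
    throw new AppError(`dbPipelineBackup: output file exists: ${filePath}`)
  }
  fs2.ensureFile(filePath) // was: _ensureFileSync
  // was: _writeJson; the async variant now carries the Async suffix
  await fs2.writeJsonAsync(schemaFilePath, schema, { spaces: 2 })
  return filePath
}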
@@ -22,3 +22,3 @@ "use strict";
console.log(`>> ${(0, nodejs_lib_1.dimWhite)('dbPipelineRestore')} started in ${(0, nodejs_lib_1.grey)(inputDirPath)}...${sinceUpdatedStr}`);
- (0, nodejs_lib_1._ensureDirSync)(inputDirPath);
+ nodejs_lib_1.fs2.ensureDir(inputDirPath);
const tablesToGzip = new Set();
@@ -58,3 +58,3 @@ const sizeByTable = {};
}
- const schema = await (0, nodejs_lib_1._readJson)(schemaFilePath);
+ const schema = await nodejs_lib_1.fs2.readJsonAsync(schemaFilePath);
await db.createTable(table, schema, { dropIfExists: true });
@@ -61,0 +61,0 @@ });
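dbPipelineRestore makes the matching read-side switch from _readJson to fs2.readJsonAsync. A small sketch of the schema-restore step, assuming CommonDB and JsonSchemaObject are importable from the package roots; the function name is hypothetical, while the createTable call matches the diff.

import { JsonSchemaObject } from '@naturalcycles/js-lib'
import { fs2 } from '@naturalcycles/nodejs-lib'
import { CommonDB } from '@naturalcycles/db-lib'

// Re-create a table from a previously backed-up schema file (was: _readJson)
export async function restoreTableSchemaSketch(
  db: CommonDB,
  table: string,
  schemaFilePath: string,
): Promise<void> {
  const schema = await fs2.readJsonAsync<JsonSchemaObject<any>>(schemaFilePath)
  await db.createTable(table, schema, { dropIfExists: true })
}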
@@ -43,3 +43,3 @@ {
},
- "version": "8.60.0",
+ "version": "8.60.1",
"description": "Lowest Common Denominator API to supported Databases",
@@ -46,0 +46,0 @@ "keywords": [
@@ -12,4 +12,3 @@ import fs from 'node:fs'
_pipeline,
- _ensureDir,
- _pathExists,
+ fs2,
} from '@naturalcycles/nodejs-lib'
@@ -54,7 +53,7 @@ import { DBSaveBatchOperation } from '../../db.model'
async loadFile<ROW extends ObjectWithId>(table: string): Promise<ROW[]> {
- await _ensureDir(this.cfg.storagePath)
+ await fs2.ensureDirAsync(this.cfg.storagePath)
const ext = `ndjson${this.cfg.gzip ? '.gz' : ''}`
const filePath = `${this.cfg.storagePath}/${table}.${ext}`
- if (!(await _pathExists(filePath))) return []
+ if (!(await fs2.pathExistsAsync(filePath))) return []
@@ -81,3 +80,3 @@ const transformUnzip = this.cfg.gzip ? [createUnzip()] : []
async saveFile<ROW extends ObjectWithId>(table: string, rows: ROW[]): Promise<void> {
- await _ensureDir(this.cfg.storagePath)
+ await fs2.ensureDirAsync(this.cfg.storagePath)
const ext = `ndjson${this.cfg.gzip ? '.gz' : ''}`
@@ -84,0 +83,0 @@ const filePath = `${this.cfg.storagePath}/${table}.${ext}`
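At the TypeScript source level the change is mostly an import consolidation: the underscore-prefixed fs helpers disappear from the named imports and file access goes through a single fs2 object. A sketch of the resulting import shape; the wrapper function name is made up.

// Before (8.60.0): one named import per fs helper
// import { _pipeline, _ensureDir, _pathExists } from '@naturalcycles/nodejs-lib'

// After (8.60.1): the fs helpers are reached through the single fs2 namespace
import { fs2 } from '@naturalcycles/nodejs-lib'

export async function ensureStorageDirSketch(storagePath: string): Promise<void> {
  await fs2.ensureDirAsync(storagePath)
}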
@@ -28,6 +28,5 @@ import fs from 'node:fs'
_pipeline,
- _emptyDir,
- _ensureDir,
dimGrey,
yellow,
+ fs2,
} from '@naturalcycles/nodejs-lib'
@@ -287,3 +286,3 @@ import { CommonDB, DBIncrement, DBPatch, DBTransaction, queryInMemory } from '../..'
- await _emptyDir(persistentStoragePath)
+ await fs2.emptyDirAsync(persistentStoragePath)
@@ -323,3 +322,3 @@ const transformZip = persistZip ? [createGzip()] : []
- await _ensureDir(persistentStoragePath)
+ await fs2.ensureDirAsync(persistentStoragePath)
@@ -326,0 +325,0 @@ this.data = {} // empty it in the beginning!
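The in-memory DB persistence follows the same pattern for its persist/restore cycle, with _emptyDir and _ensureDir becoming fs2.emptyDirAsync and fs2.ensureDirAsync. A sketch of both steps under assumed function names; the .ndjson filter mirrors the compiled code earlier in this diff.

import fsp from 'node:fs/promises'
import { fs2 } from '@naturalcycles/nodejs-lib'

// Sketch of the persist step: wipe the dump dir before writing one ndjson file per table
// (was: await _emptyDir(persistentStoragePath))
export async function flushToDiskSketch(persistentStoragePath: string): Promise<void> {
  await fs2.emptyDirAsync(persistentStoragePath)
  // ...pipe each in-memory table into `${persistentStoragePath}/${table}.ndjson` here
}

// Sketch of the restore step: make sure the dir exists, then pick up the ndjson dumps
// (was: await _ensureDir(persistentStoragePath))
export async function restoreFromDiskSketch(persistentStoragePath: string): Promise<string[]> {
  await fs2.ensureDirAsync(persistentStoragePath)
  return (await fsp.readdir(persistentStoragePath)).filter(f => f.includes('.ndjson'))
}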
@@ -53,3 +53,3 @@ import { JsonSchemaObject, JsonSchemaRootObject, ObjectWithId } from '@naturalcycles/js-lib'
table: string,
- ids: ROW['id'][],
+ ids: string[],
opt?: CommonDBOptions,
@@ -56,0 +56,0 @@ ) => Promise<ROW[]>
@@ -23,6 +23,2 @@ import fs from 'node:fs'
_pipeline,
- _ensureDirSync,
- _pathExistsSync,
- _ensureFileSync,
- _writeJson,
boldWhite,
@@ -32,2 +28,3 @@ dimWhite,
yellow,
+ fs2,
} from '@naturalcycles/nodejs-lib'
@@ -185,3 +182,3 @@ import { CommonDB } from '../common.db'
- _ensureDirSync(outputDirPath)
+ fs2.ensureDir(outputDirPath)
@@ -219,3 +216,3 @@ tables ||= await db.getTables()
- if (protectFromOverwrite && _pathExistsSync(filePath)) {
+ if (protectFromOverwrite && fs2.pathExists(filePath)) {
throw new AppError(`dbPipelineBackup: output file exists: ${filePath}`)
@@ -227,3 +224,3 @@ }
- _ensureFileSync(filePath)
+ fs2.ensureFile(filePath)
@@ -234,3 +231,3 @@ // console.log(`>> ${grey(filePath)} started...`)
const schema = await db.getTableSchema(table)
- await _writeJson(schemaFilePath, schema, { spaces: 2 })
+ await fs2.writeJsonAsync(schemaFilePath, schema, { spaces: 2 })
console.log(`>> ${grey(schemaFilePath)} saved (generated from DB)`)
@@ -237,0 +234,0 @@ }
@@ -28,4 +28,2 @@ import fs from 'node:fs'
_pipeline,
- _ensureDirSync,
- _readJson,
boldWhite,
@@ -35,2 +33,3 @@ dimWhite,
yellow,
+ fs2,
} from '@naturalcycles/nodejs-lib'
@@ -150,3 +149,3 @@ import { CommonDB } from '../common.db'
- _ensureDirSync(inputDirPath)
+ fs2.ensureDir(inputDirPath)
@@ -191,3 +190,3 @@ const tablesToGzip = new Set<string>()
- const schema = await _readJson<JsonSchemaObject<any>>(schemaFilePath)
+ const schema = await fs2.readJsonAsync<JsonSchemaObject<any>>(schemaFilePath)
await db.createTable(table, schema, { dropIfExists: true })
@@ -194,0 +193,0 @@ })