@naturalcycles/db-lib
Advanced tools
Comparing version 9.12.1 to 9.13.0
@@ -46,3 +46,3 @@ "use strict"; | ||
async saveFiles(ops) { | ||
await (0, js_lib_1.pMap)(ops, async (op) => await this.saveFile(op.table, op.rows), { concurrency: 16 }); | ||
await (0, js_lib_1.pMap)(ops, async (op) => await this.saveFile(op.table, op.rows), { concurrency: 32 }); | ||
} | ||
@@ -49,0 +49,0 @@ async saveFile(table, rows) { |
@@ -117,3 +117,3 @@ /// <reference types="node" /> | ||
* and then executing db.saveBatch(chunk) with the concurrency | ||
* of opt.chunkConcurrency (which defaults to 16). | ||
* of opt.chunkConcurrency (which defaults to 32). | ||
*/ | ||
@@ -120,0 +120,0 @@ streamSaveTransform(opt?: CommonDaoStreamSaveOptions<DBM>): Transform[]; |
@@ -702,3 +702,3 @@ "use strict"; | ||
* and then executing db.saveBatch(chunk) with the concurrency | ||
* of opt.chunkConcurrency (which defaults to 16). | ||
* of opt.chunkConcurrency (which defaults to 32). | ||
*/ | ||
@@ -715,3 +715,3 @@ streamSaveTransform(opt = {}) { | ||
const { beforeSave } = this.cfg.hooks; | ||
const { chunkSize = 500, chunkConcurrency = 16, errorMode } = opt; | ||
const { chunkSize = 500, chunkConcurrency = 32, errorMode } = opt; | ||
return [ | ||
@@ -785,3 +785,3 @@ (0, nodejs_lib_1.transformMap)(async (bm) => { | ||
if (opt.chunkSize) { | ||
const { chunkSize, chunkConcurrency = 16 } = opt; | ||
const { chunkSize, chunkConcurrency = 32 } = opt; | ||
await (0, nodejs_lib_1._pipeline)([ | ||
@@ -788,0 +788,0 @@ this.cfg.db.streamQuery(q.select(['id']), opt), |
@@ -260,3 +260,3 @@ import { BaseDBEntity, CommonLogger, ErrorMode, Promisable, ZodError, ZodSchema } from '@naturalcycles/js-lib'; | ||
* When chunkSize is set - this option controls how many chunks to run concurrently. | ||
* Defaults to 16, "the magic number of JavaScript concurrency". | ||
* Defaults to 32. | ||
*/ | ||
@@ -263,0 +263,0 @@ chunkConcurrency?: number; |
@@ -140,3 +140,3 @@ "use strict"; | ||
}, { | ||
concurrency: 16, | ||
concurrency: 32, | ||
}); | ||
@@ -158,3 +158,3 @@ } | ||
}, { | ||
concurrency: 16, | ||
concurrency: 32, | ||
}); | ||
@@ -161,0 +161,0 @@ } |
@@ -43,3 +43,3 @@ { | ||
}, | ||
"version": "9.12.1", | ||
"version": "9.13.0", | ||
"description": "Lowest Common Denominator API to supported Databases", | ||
@@ -46,0 +46,0 @@ "keywords": [ |
@@ -74,3 +74,3 @@ import fs from 'node:fs' | ||
async saveFiles(ops: DBSaveBatchOperation<any>[]): Promise<void> { | ||
await pMap(ops, async op => await this.saveFile(op.table, op.rows), { concurrency: 16 }) | ||
await pMap(ops, async op => await this.saveFile(op.table, op.rows), { concurrency: 32 }) | ||
} | ||
@@ -77,0 +77,0 @@ |
@@ -324,3 +324,3 @@ import { | ||
* When chunkSize is set - this option controls how many chunks to run concurrently. | ||
* Defaults to 16, "the magic number of JavaScript concurrency". | ||
* Defaults to 32. | ||
*/ | ||
@@ -327,0 +327,0 @@ chunkConcurrency?: number |
@@ -923,3 +923,3 @@ import { Transform } from 'node:stream' | ||
* and then executing db.saveBatch(chunk) with the concurrency | ||
* of opt.chunkConcurrency (which defaults to 16). | ||
* of opt.chunkConcurrency (which defaults to 32). | ||
*/ | ||
@@ -940,3 +940,3 @@ streamSaveTransform(opt: CommonDaoStreamSaveOptions<DBM> = {}): Transform[] { | ||
const { chunkSize = 500, chunkConcurrency = 16, errorMode } = opt | ||
const { chunkSize = 500, chunkConcurrency = 32, errorMode } = opt | ||
@@ -1024,3 +1024,3 @@ return [ | ||
if (opt.chunkSize) { | ||
const { chunkSize, chunkConcurrency = 16 } = opt | ||
const { chunkSize, chunkConcurrency = 32 } = opt | ||
@@ -1027,0 +1027,0 @@ await _pipeline([ |
@@ -227,3 +227,3 @@ import { AppError, CommonLogger, KeyValueTuple, pMap } from '@naturalcycles/js-lib' | ||
{ | ||
concurrency: 16, | ||
concurrency: 32, | ||
}, | ||
@@ -252,3 +252,3 @@ ) | ||
{ | ||
concurrency: 16, | ||
concurrency: 32, | ||
}, | ||
@@ -255,0 +255,0 @@ ) |
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
420228