@elastic.io/batching-library
Advanced tools
Comparing version 0.0.1-dev.8 to 0.0.1-dev.9
import { AbstractStore } from './storage/IStore'; | ||
import { Batch, BatchConfig, BatchItem, BatchStatus, Pair } from './entity/dto' ; | ||
import { Transform, Writable } from 'stream'; | ||
import { Batch, BatchConfig, BatchItem, BatchStatus } from './entity/dto' ; | ||
import componentLogger from '@elastic.io/component-logger'; | ||
import EventEmitter = NodeJS.EventEmitter; | ||
// import { Utils } from './utils'; | ||
@@ -22,4 +19,3 @@ export { BatchConfig, Pair, Batch, BatchStatus, BatchItem, batchStatusArr, IHash } from './entity/dto'; | ||
private store: AbstractStore; | ||
private config: BatchConfig; | ||
private emitter: EventEmitter; | ||
private readonly config: BatchConfig; | ||
@@ -32,10 +28,9 @@ /** | ||
*/ | ||
public constructor(store: AbstractStore, config: BatchConfig, emitter: EventEmitter) { | ||
public constructor(store: AbstractStore, config: BatchConfig) { | ||
this.store = store; | ||
this.config = config; | ||
this.emitter = emitter; | ||
} | ||
/** | ||
* Get 'READY' batches and locks them | ||
* Atomic Get 'READY' batches and locks them (set status to 'LOCKED') | ||
*/ | ||
@@ -54,5 +49,5 @@ public async getReadyBatches(): Promise<Batch[]> { | ||
*/ | ||
public async updateBatchStatusById(batchId: string, status: BatchStatus): Promise<Batch[]> { | ||
await this.store.updateBatchStatusById(batchId, status); | ||
return await this.store.getAndLockBatches(); | ||
public async updateBatchStatusById(batchId: string, status: BatchStatus): Promise<Batch> { | ||
const updatedBatch = await this.store.updateBatchStatusById(batchId, status); | ||
return updatedBatch; | ||
} | ||
@@ -63,2 +58,3 @@ | ||
* Ensures that the item can be added to a batch. If it can't be added to an existing one, a new batch is created. | ||
* Checks batches after inserting the new item | ||
* @param item batch item | ||
@@ -70,80 +66,6 @@ * @return Batch with saved item. | ||
batch.items = batch.items.slice(batch.items.length - 1); | ||
this.LOG.debug('Starting to check batch: id: %s', batch.id); | ||
await this.store.checkBatch(batch.id, this.config); | ||
return batch; | ||
} | ||
/** | ||
* Get batches with provided status in form of stream. | ||
* @param status batch status. | ||
*/ | ||
public async getBatchesStreamByStatus(status: BatchStatus): Promise<Transform> { | ||
return await this.store.getAllInStatus(status); | ||
} | ||
/** | ||
* Emit each batch in stream to upper layer using provided emitter. | ||
*/ | ||
public getBatchEmitStream(): Writable { | ||
let chunkCounter: number = 0; | ||
return new Writable({ | ||
objectMode: true, | ||
write: (batch, _, cb) => { | ||
try { | ||
chunkCounter += 1; | ||
this.LOG.debug('Received chunk %d: %j', chunkCounter, batch); | ||
this.emitter.emit('data', batch); | ||
batch.status = 'SUCCESS'; | ||
cb(); | ||
} catch (e) { | ||
batch.status = 'FAILURE'; | ||
this.LOG.error('Error during processing chunk %j: %o', batch, e); | ||
cb(e); | ||
} | ||
this.store.update({ key: batch.id, value: batch }); | ||
}, | ||
}); | ||
} | ||
/** | ||
* Check each batch in stream over configuration. If they match conditions set them to status READY. | ||
*/ | ||
public getCheckBatchesStream(): Writable { | ||
let chunkCounter: number = 0; | ||
const updates: Pair<string, Batch>[] = []; | ||
function prepareForUpdate(batch: Batch): void { | ||
batch.status = 'READY'; | ||
updates.push({ key: batch.id, value: batch }); | ||
} | ||
return new Writable({ | ||
objectMode: true, | ||
write: (batch, _, cb) => { | ||
try { | ||
chunkCounter += 1; | ||
const { maxSize, maxWaitTime, maxItemsNumber } = this.config; | ||
if (batch.size >= maxSize) { | ||
this.LOG.debug(`Batch status will be changed to READY because condition: batch.size >= maxSize (${batch.size} >= ${maxSize})`); | ||
prepareForUpdate(batch); | ||
} else if (batch.items.length >= maxItemsNumber) { | ||
this.LOG.debug(`Batch status will be changed to READY because condition: batch.items.length >= maxItemsNumber (${batch.items.length} >= ${maxItemsNumber})`); | ||
prepareForUpdate(batch); | ||
} else if (batch.updateAt <= (new Date).getTime() - maxWaitTime) { | ||
this.LOG.debug(`Batch statu, { item: { msg: 4 }s will be changed to READY because condition: batch.updateAt >= (new Date).getTime() + maxWaitTime (${batch.updateAt} >= ${(new Date).getTime() + maxWaitTime})`); | ||
prepareForUpdate(batch); | ||
} | ||
cb(); | ||
} catch (e) { | ||
this.LOG.error('Error during processing chunk %j: %o', batch, e); | ||
cb(e); | ||
} | ||
}, | ||
final: async (cb) => { | ||
if (updates.length > 0) { | ||
await this.store.updateAll(updates); | ||
this.LOG.debug('Updated chunks %d: %j', chunkCounter, updates); | ||
} | ||
cb(); | ||
}, | ||
}); | ||
} | ||
} |
@@ -48,2 +48,4 @@ import { Batch, BatchConfig, BatchItem, BatchStatus, Pair } from '../entity/dto'; | ||
public abstract checkBatch(batchId: string, batchConfig: BatchConfig): Promise<Query<any>>; | ||
public abstract getAndLockBatches(): Promise<Batch[]>; | ||
@@ -50,0 +52,0 @@ |
@@ -183,2 +183,5 @@ import mongoose, { ClientSession, Connection, ConnectionOptions, Query } from 'mongoose'; | ||
/** | ||
* @inheritDoc | ||
*/ | ||
public async checkBatches(batchConfig: BatchConfig): Promise<Query<any>> { | ||
@@ -208,2 +211,37 @@ try { | ||
/** | ||
* @inheritDoc | ||
*/ | ||
public async checkBatch(batchId: string, batchConfig: BatchConfig): Promise<Query<any>> { | ||
try { | ||
const conn = await this.openConnection(); | ||
const model = await this.getModel(conn); | ||
this.LOG.debug('Start batch check'); | ||
const result = await model.updateOne({ | ||
status: 'OPEN', | ||
_id: batchId, | ||
$or: [ | ||
{ updatedAt: { $lte: new Date(new Date().getTime() - batchConfig.maxWaitTime) } }, | ||
{ itemsCount: { $gte: batchConfig.maxItemsNumber } }, | ||
{ size: { $gte: batchConfig.maxSize } }, | ||
], | ||
}, | ||
{ $set: { status: 'READY' } }, | ||
{ new: true }); | ||
if (result) { | ||
this.LOG.trace('Updating result %j', result); | ||
} else { | ||
this.LOG.debug('Batch is not ready'); | ||
} | ||
return result; | ||
} catch (e) { | ||
this.logUnexpectedError(e); | ||
throw e; | ||
} | ||
} | ||
/** | ||
* @inheritDoc | ||
*/ | ||
public async updateBatchStatusById(id: string, status: BatchStatus): Promise<Batch> { | ||
@@ -229,2 +267,5 @@ try { | ||
/** | ||
* @inheritDoc | ||
*/ | ||
public async getAndLockBatches(): Promise<Batch[]> { | ||
@@ -231,0 +272,0 @@ const conn = await this.openConnection(); |
@@ -125,2 +125,11 @@ import { Batch, BatchConfig, BatchItem, BatchStatus, Pair } from '../entity/dto'; | ||
/** | ||
* @async Updates a batch's status to 'READY' when it matches the batch configuration limits. | ||
* | ||
* @param batchId id of the batch to check. | ||
* @param batchConfig batch configuration defining the limits. | ||
* @return number (count) of updated objects. | ||
*/ | ||
checkBatch(batchId: string, batchConfig: BatchConfig): Promise<Query<any>>; | ||
/** | ||
* @async update batches status by batch Configuration. | ||
@@ -196,2 +205,4 @@ * | ||
public abstract checkBatch(batchId: string, batchConfig: BatchConfig): Promise<Query<any>>; | ||
public abstract updateBatchStatusById(id: string, status: BatchStatus): Promise<Batch>; | ||
@@ -198,0 +209,0 @@ |
@@ -1,7 +0,3 @@ | ||
/// <reference types="node" /> | ||
/// <reference types="mocha" /> | ||
import { AbstractStore } from './storage/IStore'; | ||
import { Batch, BatchConfig, BatchItem, BatchStatus } from './entity/dto'; | ||
import { Transform, Writable } from 'stream'; | ||
import EventEmitter = NodeJS.EventEmitter; | ||
export { BatchConfig, Pair, Batch, BatchStatus, BatchItem, batchStatusArr, IHash } from './entity/dto'; | ||
@@ -19,4 +15,3 @@ export { AbstractStore } from './storage/IStore'; | ||
private store; | ||
private config; | ||
private emitter; | ||
private readonly config; | ||
/** | ||
@@ -28,5 +23,5 @@ * @constructor creates BatchClient object. | ||
*/ | ||
constructor(store: AbstractStore, config: BatchConfig, emitter: EventEmitter); | ||
constructor(store: AbstractStore, config: BatchConfig); | ||
/** | ||
* Get 'READY' batches and locks them | ||
* Atomic Get 'READY' batches and locks them (set status to 'LOCKED') | ||
*/ | ||
@@ -40,6 +35,7 @@ getReadyBatches(): Promise<Batch[]>; | ||
*/ | ||
updateBatchStatusById(batchId: string, status: BatchStatus): Promise<Batch[]>; | ||
updateBatchStatusById(batchId: string, status: BatchStatus): Promise<Batch>; | ||
/** | ||
* Save item to Batch. | ||
* Ensures that the item can be added to a batch. If it can't be added to an existing one, a new batch is created. | ||
* Checks batches after inserting the new item | ||
* @param item batch item | ||
@@ -49,16 +45,3 @@ * @return Batch with saved item. | ||
saveItem(item: BatchItem): Promise<Batch>; | ||
/** | ||
* Get batches with provided status in form of stream. | ||
* @param status batch status. | ||
*/ | ||
getBatchesStreamByStatus(status: BatchStatus): Promise<Transform>; | ||
/** | ||
* Emit each batch in stream to upper layer using provided emitter. | ||
*/ | ||
getBatchEmitStream(): Writable; | ||
/** | ||
* Check each batch in stream over configuration. If they match conditions set them to status READY. | ||
*/ | ||
getCheckBatchesStream(): Writable; | ||
} | ||
//# sourceMappingURL=client.d.ts.map |
@@ -6,5 +6,3 @@ "use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
const stream_1 = require("stream"); | ||
const component_logger_1 = __importDefault(require("@elastic.io/component-logger")); | ||
// import { Utils } from './utils'; | ||
var dto_1 = require("./entity/dto"); | ||
@@ -19,5 +17,5 @@ exports.BatchConfig = dto_1.BatchConfig; | ||
exports.MongoStore = MongoStore_1.MongoStore; | ||
var stream_2 = require("stream"); | ||
exports.Transform = stream_2.Transform; | ||
exports.Writable = stream_2.Writable; | ||
var stream_1 = require("stream"); | ||
exports.Transform = stream_1.Transform; | ||
exports.Writable = stream_1.Writable; | ||
var utils_1 = require("./utils"); | ||
@@ -36,10 +34,9 @@ exports.Utils = utils_1.Utils; | ||
*/ | ||
constructor(store, config, emitter) { | ||
constructor(store, config) { | ||
this.LOG = component_logger_1.default(); | ||
this.store = store; | ||
this.config = config; | ||
this.emitter = emitter; | ||
} | ||
/** | ||
* Get 'READY' batches and locks them | ||
* Atomic Get 'READY' batches and locks them (set status to 'LOCKED') | ||
*/ | ||
@@ -58,4 +55,4 @@ async getReadyBatches() { | ||
async updateBatchStatusById(batchId, status) { | ||
await this.store.updateBatchStatusById(batchId, status); | ||
return await this.store.getAndLockBatches(); | ||
const updatedBatch = await this.store.updateBatchStatusById(batchId, status); | ||
return updatedBatch; | ||
} | ||
@@ -65,2 +62,3 @@ /** | ||
* Ensures that the item can be added to a batch. If it can't be added to an existing one, a new batch is created. | ||
* Checks batches after inserting the new item | ||
* @param item batch item | ||
@@ -72,81 +70,8 @@ * @return Batch with saved item. | ||
batch.items = batch.items.slice(batch.items.length - 1); | ||
this.LOG.debug('Starting to check batch: id: %s', batch.id); | ||
await this.store.checkBatch(batch.id, this.config); | ||
return batch; | ||
} | ||
/** | ||
* Get batches with provided status in form of stream. | ||
* @param status batch status. | ||
*/ | ||
async getBatchesStreamByStatus(status) { | ||
return await this.store.getAllInStatus(status); | ||
} | ||
/** | ||
* Emit each batch in stream to upper layer using provided emitter. | ||
*/ | ||
getBatchEmitStream() { | ||
let chunkCounter = 0; | ||
return new stream_1.Writable({ | ||
objectMode: true, | ||
write: (batch, _, cb) => { | ||
try { | ||
chunkCounter += 1; | ||
this.LOG.debug('Received chunk %d: %j', chunkCounter, batch); | ||
this.emitter.emit('data', batch); | ||
batch.status = 'SUCCESS'; | ||
cb(); | ||
} | ||
catch (e) { | ||
batch.status = 'FAILURE'; | ||
this.LOG.error('Error during processing chunk %j: %o', batch, e); | ||
cb(e); | ||
} | ||
this.store.update({ key: batch.id, value: batch }); | ||
}, | ||
}); | ||
} | ||
/** | ||
* Check each batch in stream over configuration. If they match conditions set them to status READY. | ||
*/ | ||
getCheckBatchesStream() { | ||
let chunkCounter = 0; | ||
const updates = []; | ||
function prepareForUpdate(batch) { | ||
batch.status = 'READY'; | ||
updates.push({ key: batch.id, value: batch }); | ||
} | ||
return new stream_1.Writable({ | ||
objectMode: true, | ||
write: (batch, _, cb) => { | ||
try { | ||
chunkCounter += 1; | ||
const { maxSize, maxWaitTime, maxItemsNumber } = this.config; | ||
if (batch.size >= maxSize) { | ||
this.LOG.debug(`Batch status will be changed to READY because condition: batch.size >= maxSize (${batch.size} >= ${maxSize})`); | ||
prepareForUpdate(batch); | ||
} | ||
else if (batch.items.length >= maxItemsNumber) { | ||
this.LOG.debug(`Batch status will be changed to READY because condition: batch.items.length >= maxItemsNumber (${batch.items.length} >= ${maxItemsNumber})`); | ||
prepareForUpdate(batch); | ||
} | ||
else if (batch.updateAt <= (new Date).getTime() - maxWaitTime) { | ||
this.LOG.debug(`Batch statu, { item: { msg: 4 }s will be changed to READY because condition: batch.updateAt >= (new Date).getTime() + maxWaitTime (${batch.updateAt} >= ${(new Date).getTime() + maxWaitTime})`); | ||
prepareForUpdate(batch); | ||
} | ||
cb(); | ||
} | ||
catch (e) { | ||
this.LOG.error('Error during processing chunk %j: %o', batch, e); | ||
cb(e); | ||
} | ||
}, | ||
final: async (cb) => { | ||
if (updates.length > 0) { | ||
await this.store.updateAll(updates); | ||
this.LOG.debug('Updated chunks %d: %j', chunkCounter, updates); | ||
} | ||
cb(); | ||
}, | ||
}); | ||
} | ||
} | ||
exports.BatchClient = BatchClient; | ||
//# sourceMappingURL=client.js.map |
@@ -28,2 +28,3 @@ /// <reference types="node" /> | ||
abstract checkBatches(batchConfig: BatchConfig): Promise<Query<any>>; | ||
abstract checkBatch(batchId: string, batchConfig: BatchConfig): Promise<Query<any>>; | ||
abstract getAndLockBatches(): Promise<Batch[]>; | ||
@@ -30,0 +31,0 @@ abstract updateBatchStatusById(id: string, status: BatchStatus): Promise<Batch>; |
@@ -68,4 +68,17 @@ /// <reference types="node" /> | ||
addItemToBatch(batchItem: BatchItem, batchConfig: BatchConfig): Promise<Batch>; | ||
/** | ||
* @inheritDoc | ||
*/ | ||
checkBatches(batchConfig: BatchConfig): Promise<Query<any>>; | ||
/** | ||
* @inheritDoc | ||
*/ | ||
checkBatch(batchId: string, batchConfig: BatchConfig): Promise<Query<any>>; | ||
/** | ||
* @inheritDoc | ||
*/ | ||
updateBatchStatusById(id: string, status: BatchStatus): Promise<Batch>; | ||
/** | ||
* @inheritDoc | ||
*/ | ||
getAndLockBatches(): Promise<Batch[]>; | ||
@@ -72,0 +85,0 @@ /** |
@@ -164,2 +164,5 @@ "use strict"; | ||
} | ||
/** | ||
* @inheritDoc | ||
*/ | ||
async checkBatches(batchConfig) { | ||
@@ -186,2 +189,35 @@ try { | ||
} | ||
/** | ||
* @inheritDoc | ||
*/ | ||
async checkBatch(batchId, batchConfig) { | ||
try { | ||
const conn = await this.openConnection(); | ||
const model = await this.getModel(conn); | ||
this.LOG.debug('Start batch check'); | ||
const result = await model.updateOne({ | ||
status: 'OPEN', | ||
_id: batchId, | ||
$or: [ | ||
{ updatedAt: { $lte: new Date(new Date().getTime() - batchConfig.maxWaitTime) } }, | ||
{ itemsCount: { $gte: batchConfig.maxItemsNumber } }, | ||
{ size: { $gte: batchConfig.maxSize } }, | ||
], | ||
}, { $set: { status: 'READY' } }, { new: true }); | ||
if (result) { | ||
this.LOG.trace('Updating result %j', result); | ||
} | ||
else { | ||
this.LOG.debug('Batch is not ready'); | ||
} | ||
return result; | ||
} | ||
catch (e) { | ||
this.logUnexpectedError(e); | ||
throw e; | ||
} | ||
} | ||
/** | ||
* @inheritDoc | ||
*/ | ||
async updateBatchStatusById(id, status) { | ||
@@ -203,2 +239,5 @@ try { | ||
} | ||
/** | ||
* @inheritDoc | ||
*/ | ||
async getAndLockBatches() { | ||
@@ -205,0 +244,0 @@ const conn = await this.openConnection(); |
@@ -112,2 +112,10 @@ /// <reference types="node" /> | ||
/** | ||
* @async Updates a batch's status to 'READY' when it matches the batch configuration limits. | ||
* | ||
* @param batchId id of the batch to check. | ||
* @param batchConfig batch configuration defining the limits. | ||
* @return number (count) of updated objects. | ||
*/ | ||
checkBatch(batchId: string, batchConfig: BatchConfig): Promise<Query<any>>; | ||
/** | ||
* @async update batches status by batch Configuration. | ||
@@ -170,2 +178,3 @@ * | ||
abstract checkBatches(batchConfig: BatchConfig): Promise<Query<any>>; | ||
abstract checkBatch(batchId: string, batchConfig: BatchConfig): Promise<Query<any>>; | ||
abstract updateBatchStatusById(id: string, status: BatchStatus): Promise<Batch>; | ||
@@ -172,0 +181,0 @@ abstract getAndLockBatches(): Promise<Batch[]>; |
{ | ||
"name": "@elastic.io/batching-library", | ||
"version": "0.0.1-dev.8", | ||
"version": "0.0.1-dev.9", | ||
"description": "Library for batching implementation on elastic.io", | ||
@@ -19,2 +19,3 @@ "homepage": "https://github.com/elasticio/batching-library#readme", | ||
"pretest": "tslint -c tslint.json --project tsconfig.json lib/**/*.ts test/**/*.ts", | ||
"preparepublish": "npm run tsc", | ||
"posttest": "npm run tsc", | ||
@@ -21,0 +22,0 @@ "test": "mocha --exit --require ts-node/register test/spec/**/*.ts", |
@@ -10,7 +10,6 @@  | ||
#### `new BatchClient(store, config, emitter)` | ||
#### `new BatchClient(store, config)` | ||
The BatchClient constructor expects the following parameters: | ||
1. store - implementation of [IStore interface](lib/storage/IStore.ts). For example [MongoStore](lib/storage/impl/MongoStore.ts). This is where batches will be stored. | ||
2. config - instance of [BatchConfig](lib/entity/dto.ts). It represents the configuration and defines limitations for batches, e.g. maxSize, maxItemsNumber. | ||
3. emitter - implementation of Node.Js EventEmitter. Client use provided emmiter to communicate with upper layer. | ||
##### Example | ||
@@ -36,18 +35,38 @@ ```typescript | ||
const config = new BatchConfig(maxSize, maxItemsNumber, maxWaitTime, maxEmitRate, maxRetry); | ||
const emitter: any = new MyEmitter(); | ||
const client = new BatchClient(mgStore, config, emitter); | ||
const client = new BatchClient(mgStore, config); | ||
``` | ||
### Methods | ||
#### 1. `emitReadyBatches()` | ||
Using provided emitter emit batches with status 'READY' | ||
#### 1. `saveItem(item)` | ||
Save the provided item to a batch; returns the batch with the saved item (without the batch's other items). | ||
##### BatchItem properties | ||
|Parameter|Type|Required|Description| | ||
|---------|----|--------|-----------| | ||
|id|string|false| If not specified - uuid v1 will be generated| | ||
|item|string|true| Body of the Batch Item| | ||
##### Example | ||
```typescript | ||
await client.emitReadyBatches(); | ||
const batch: Batch = await client.saveItem({ id: 0, item: {}}); | ||
``` | ||
#### 2. `saveItem(item)` | ||
Save provided item to batch, returns batch with saved item. | ||
#### 2. `getReadyBatches()` | ||
Atomically gets batches with status 'READY' and locks them (sets their status to 'LOCKED') | ||
##### Example | ||
```typescript | ||
batch: Batch = await client.saveItem({ id: 0, item: {}}); | ||
``` | ||
const batches: Batch[] = await client.getReadyBatches(); | ||
await Promise.all(batches.map(async (batch) => { | ||
try { | ||
// process batch implementation | ||
await client.updateBatchStatusById(batch.id, 'SUCCESS'); // batch was successfully processed | ||
} catch (e) { | ||
log.error('Error: %o', e); | ||
await client.updateBatchStatusById(batch.id, 'FAILURE'); // batch processed with error | ||
} | ||
})); | ||
``` | ||
# Implementing your own batch storage | ||
@@ -83,1 +102,4 @@ | ||
3. Emit rate not implemented. | ||
4. The library does not guarantee sequential processing of batch items. |
import { MongoStore } from '../../lib/storage/impl/MongoStore'; | ||
import { BatchClient } from '../../lib/client'; | ||
import { BatchItem, BatchConfig } from '../../lib/entity/dto'; | ||
import sinon from 'sinon'; | ||
import { expect } from 'chai'; | ||
@@ -12,3 +11,2 @@ import 'mocha'; | ||
let mgStore: MongoStore; | ||
let emitter: any; | ||
@@ -32,5 +30,2 @@ const uri: string = process.env.MONGO_URL || ''; | ||
beforeEach(async () => { | ||
emitter = { | ||
emit: sinon.spy(), | ||
}; | ||
if (dropDB) { | ||
@@ -50,3 +45,3 @@ const db = await mgStore.openConnection(); | ||
const conf = new BatchConfig(50000, 3, 60000); | ||
const client = new BatchClient(mgStore, conf, emitter); | ||
const client = new BatchClient(mgStore, conf); | ||
const items = [ | ||
@@ -66,3 +61,3 @@ { num: { msg: 1 } }, | ||
const conf = new BatchConfig(50000, 3, -1000); | ||
const client = new BatchClient(mgStore, conf, emitter); | ||
const client = new BatchClient(mgStore, conf); | ||
const items = [ | ||
@@ -82,3 +77,3 @@ { num: { msg: 1 } }, | ||
const conf = new BatchConfig(72, 30, 60000); | ||
const client = new BatchClient(mgStore, conf, emitter); | ||
const client = new BatchClient(mgStore, conf); | ||
const items = [ | ||
@@ -102,3 +97,3 @@ { num: { msg: 1 } }, | ||
const conf = new BatchConfig(9999, 2, 60000); | ||
const client = new BatchClient(mgStore, conf, emitter); | ||
const client = new BatchClient(mgStore, conf); | ||
const items = [ | ||
@@ -120,3 +115,3 @@ { num: { msg: 1 } }, | ||
const conf = new BatchConfig(72, 30, 60000); | ||
const client = new BatchClient(mgStore, conf, emitter); | ||
const client = new BatchClient(mgStore, conf); | ||
const items = [ | ||
@@ -143,3 +138,3 @@ { num: { msg: 1 } }, | ||
const conf = new BatchConfig(72, 30, -1000); | ||
const client = new BatchClient(mgStore, conf, emitter); | ||
const client = new BatchClient(mgStore, conf); | ||
const items = [ | ||
@@ -146,0 +141,0 @@ { num: { msg: 1 } }, |
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
Sorry, the diff of this file is not supported yet
103
164118
3160