Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign inDemoInstall
Socket

@yornaath/batshit

Package Overview
Dependencies
Maintainers
1
Versions
24
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@yornaath/batshit - npm Package Compare versions

Comparing version 0.8.0 to 0.9.0

25

dist/index.d.ts

@@ -65,4 +65,8 @@ /// <reference types="node" />

*/
(start: number, latest: number): number;
(start: number, latest: number, batchSize: number): Schedule;
};
/**
* A schedule for when to execute a batched fetch call.
*/
export type Schedule = number | "immediate" | "never";
export type BatcherMemory<T, Q> = {

@@ -117,2 +121,21 @@ seq: number;

export declare const bufferScheduler: (ms: number) => BatcherScheduler;
/**
* Same as windowScheduler, will batch calls made within a window of time OR when the max batch size is reached.
*
* @param config: {windowMs: number; maxBatchSize: number;}
* @returns BatcherScheduler
*/
export declare const windowedFiniteBatchScheduler: (config: {
windowMs: number;
maxBatchSize: number;
}) => BatcherScheduler;
/**
* Will batch calls when the max batch size is reached.
*
* @param config: {maxBatchSize: number;}
* @returns BatcherScheduler
*/
export declare const maxBatchSizeScheduler: (config: {
maxBatchSize: number;
}) => BatcherScheduler;
//# sourceMappingURL=index.d.ts.map

@@ -26,2 +26,9 @@ import { deferred } from './deferred.esm.js';

devtools?.create({ seq: mem.seq });
// Reset the shared batcher memory so subsequent fetch calls start a fresh
// batch: clears the queued query set, replaces the pending request with a new
// deferred (presumably a promise/resolver pair — confirm in deferred.esm.js),
// and discards the scheduling timer and window timestamps.
const nextBatch = () => {
mem.batch = new Set();
mem.currentRequest = deferred();
mem.timer = undefined;
mem.start = null;
mem.latest = null;
};
const fetch = (query) => {

@@ -33,3 +40,3 @@ if (!mem.start)

clearTimeout(mem.timer);
const scheduled = scheduler(mem.start, mem.latest);
const scheduled = scheduler(mem.start, mem.latest, mem.batch.size);
devtools?.queue({

@@ -43,3 +50,3 @@ seq: mem.seq,

});
mem.timer = setTimeout(() => {
const fetchBatch = () => {
const currentSeq = mem.seq;

@@ -49,7 +56,3 @@ const req = config.fetcher([...mem.batch]);

devtools?.fetch({ seq: currentSeq, batch: [...mem.batch] });
mem.batch = new Set();
mem.currentRequest = deferred();
mem.timer = undefined;
mem.start = null;
mem.latest = null;
nextBatch();
req

@@ -65,4 +68,16 @@ .then((data) => {

mem.seq++;
}, scheduled);
return mem.currentRequest.value.then((items) => config.resolver(items, query));
return req;
};
if (scheduled === "immediate") {
const req = mem.currentRequest;
fetchBatch();
return req.value.then((items) => config.resolver(items, query));
}
else if (scheduled === "never") {
return mem.currentRequest.value.then((items) => config.resolver(items, query));
}
else {
mem.timer = setTimeout(fetchBatch, scheduled);
return mem.currentRequest.value.then((items) => config.resolver(items, query));
}
};

@@ -106,4 +121,27 @@ return { fetch };

};
/**
 * Same as windowScheduler, will batch calls made within a window of time OR when the max batch size is reached.
 *
 * Once the batch holds `maxBatchSize` items the fetch fires immediately;
 * otherwise it is scheduled for the remainder of the `windowMs` window
 * (measured from the first call to the most recent one).
 *
 * @param config: {windowMs: number; maxBatchSize: number;}
 * @returns BatcherScheduler
 */
const windowedFiniteBatchScheduler = (config) => (start, latest, batchSize) => batchSize >= config.maxBatchSize
    ? "immediate"
    : config.windowMs - (latest - start);
/**
 * Will batch calls when the max batch size is reached.
 *
 * Returns "never" until the batch holds `maxBatchSize` items, at which point
 * it returns "immediate" — i.e. the batch waits indefinitely for the size
 * threshold, with no time-based trigger.
 *
 * @param config: {maxBatchSize: number;}
 * @returns BatcherScheduler
 */
const maxBatchSizeScheduler = (config) => (_start, _latest, batchSize) => config.maxBatchSize <= batchSize ? "immediate" : "never";
export { bufferScheduler, create, indexedResolver, keyResolver, windowScheduler };
export { bufferScheduler, create, indexedResolver, keyResolver, maxBatchSizeScheduler, windowScheduler, windowedFiniteBatchScheduler };
//# sourceMappingURL=index.esm.js.map

@@ -28,2 +28,9 @@ 'use strict';

devtools?.create({ seq: mem.seq });
// Reset the shared batcher memory so subsequent fetch calls start a fresh
// batch: clears the queued query set, replaces the pending request with a new
// deferred (presumably a promise/resolver pair — confirm in the deferred
// module), and discards the scheduling timer and window timestamps.
const nextBatch = () => {
mem.batch = new Set();
mem.currentRequest = deferred.deferred();
mem.timer = undefined;
mem.start = null;
mem.latest = null;
};
const fetch = (query) => {

@@ -35,3 +42,3 @@ if (!mem.start)

clearTimeout(mem.timer);
const scheduled = scheduler(mem.start, mem.latest);
const scheduled = scheduler(mem.start, mem.latest, mem.batch.size);
devtools?.queue({

@@ -45,3 +52,3 @@ seq: mem.seq,

});
mem.timer = setTimeout(() => {
const fetchBatch = () => {
const currentSeq = mem.seq;

@@ -51,7 +58,3 @@ const req = config.fetcher([...mem.batch]);

devtools?.fetch({ seq: currentSeq, batch: [...mem.batch] });
mem.batch = new Set();
mem.currentRequest = deferred.deferred();
mem.timer = undefined;
mem.start = null;
mem.latest = null;
nextBatch();
req

@@ -67,4 +70,16 @@ .then((data) => {

mem.seq++;
}, scheduled);
return mem.currentRequest.value.then((items) => config.resolver(items, query));
return req;
};
if (scheduled === "immediate") {
const req = mem.currentRequest;
fetchBatch();
return req.value.then((items) => config.resolver(items, query));
}
else if (scheduled === "never") {
return mem.currentRequest.value.then((items) => config.resolver(items, query));
}
else {
mem.timer = setTimeout(fetchBatch, scheduled);
return mem.currentRequest.value.then((items) => config.resolver(items, query));
}
};

@@ -108,2 +123,25 @@ return { fetch };

};
/**
 * Same as windowScheduler, will batch calls made within a window of time OR when the max batch size is reached.
 *
 * Once the batch holds `maxBatchSize` items the fetch fires immediately;
 * otherwise it is scheduled for the remainder of the `windowMs` window
 * (measured from the first call to the most recent one).
 *
 * @param config: {windowMs: number; maxBatchSize: number;}
 * @returns BatcherScheduler
 */
const windowedFiniteBatchScheduler = (config) => (start, latest, batchSize) => batchSize >= config.maxBatchSize
    ? "immediate"
    : config.windowMs - (latest - start);
/**
 * Will batch calls when the max batch size is reached.
 *
 * Returns "never" until the batch holds `maxBatchSize` items, at which point
 * it returns "immediate" — i.e. the batch waits indefinitely for the size
 * threshold, with no time-based trigger.
 *
 * @param config: {maxBatchSize: number;}
 * @returns BatcherScheduler
 */
const maxBatchSizeScheduler = (config) => (_start, _latest, batchSize) => config.maxBatchSize <= batchSize ? "immediate" : "never";

@@ -114,3 +152,5 @@ exports.bufferScheduler = bufferScheduler;

exports.keyResolver = keyResolver;
exports.maxBatchSizeScheduler = maxBatchSizeScheduler;
exports.windowScheduler = windowScheduler;
exports.windowedFiniteBatchScheduler = windowedFiniteBatchScheduler;
//# sourceMappingURL=index.js.map

5

package.json
{
"name": "@yornaath/batshit",
"version": "0.8.0",
"version": "0.9.0",
"description": "A batch manager that will deduplicate and batch requests for a certain data type made within a window.",

@@ -9,2 +9,3 @@ "author": {

},
"license": "MIT",
"publishConfig": {

@@ -34,3 +35,3 @@ "access": "public"

"dependencies": {
"@yornaath/batshit-devtools": "^1.5.0"
"@yornaath/batshit-devtools": "^1.6.0"
},

@@ -37,0 +38,0 @@ "scripts": {

@@ -114,2 +114,33 @@ # @yornaath/batshit [![CI](https://github.com/yornaath/batshit/actions/workflows/ci.yml/badge.svg)](https://github.com/yornaath/batshit/actions/workflows/ci.yml)

### Limit batch size
We provide two helper functions for limiting the number of batched fetch calls.
#### `windowedFiniteBatchScheduler`
This will batch all calls made within a certain time frame, up to a certain max batch size, before it starts a new batch.
```ts
const batcher = batshit.create({
...,
scheduler: windowedFiniteBatchScheduler({
windowMs: 10,
maxBatchSize: 100,
}),
});
```
#### `maxBatchSizeScheduler`
Same as the one above, but will wait indefinitely until the max batch size is reached.
```ts
const batcher = batshit.create({
...,
scheduler: maxBatchSizeScheduler({
maxBatchSize: 100,
}),
});
```
### Fetching where response is an object of items

@@ -122,4 +153,4 @@

{
1: {"username": "bob"},
2: {"username": "alice"}
"1": {"username": "bob"},
"2": {"username": "alice"}
}

@@ -132,4 +163,4 @@ ```

const batcher = batshit.create({
fetcher: async (ids: number[]) => {
const users: Record<number, User> = await fetchUserRecords(ids)
fetcher: async (ids: string[]) => {
const users: Record<string, User> = await fetchUserRecords(ids)
return users

@@ -136,0 +167,0 @@ },

@@ -67,5 +67,10 @@ import type { DevtoolsListener } from "@yornaath/batshit-devtools";

*/
(start: number, latest: number): number;
(start: number, latest: number, batchSize: number): Schedule;
};
/**
* A schedule for when to execute a batched fetch call.
*/
export type Schedule = number | "immediate" | "never";
export type BatcherMemory<T, Q> = {

@@ -112,2 +117,10 @@ seq: number;

// Reset the shared batcher memory so subsequent fetch calls start a fresh
// batch: clears the queued query set, replaces the pending request with a new
// deferred<T> (presumably a promise/resolver pair — confirm in the deferred
// module), and discards the scheduling timer and window timestamps.
const nextBatch = () => {
mem.batch = new Set();
mem.currentRequest = deferred<T>();
mem.timer = undefined;
mem.start = null;
mem.latest = null;
};
const fetch = (query: Q): Promise<R> => {

@@ -120,3 +133,3 @@ if (!mem.start) mem.start = Date.now();

const scheduled = scheduler(mem.start, mem.latest);
const scheduled = scheduler(mem.start, mem.latest, mem.batch.size);

@@ -132,3 +145,3 @@ devtools?.queue({

mem.timer = setTimeout(() => {
const fetchBatch = () => {
const currentSeq = mem.seq;

@@ -140,7 +153,3 @@ const req = config.fetcher([...mem.batch]);

mem.batch = new Set();
mem.currentRequest = deferred<T>();
mem.timer = undefined;
mem.start = null;
mem.latest = null;
nextBatch();

@@ -158,7 +167,20 @@ req

mem.seq++;
}, scheduled);
return mem.currentRequest.value.then((items) =>
config.resolver(items, query)
);
return req;
};
if (scheduled === "immediate") {
const req = mem.currentRequest;
fetchBatch();
return req.value.then((items) => config.resolver(items, query));
} else if (scheduled === "never") {
return mem.currentRequest.value.then((items) =>
config.resolver(items, query)
);
} else {
mem.timer = setTimeout(fetchBatch, scheduled);
return mem.currentRequest.value.then((items) =>
config.resolver(items, query)
);
}
};

@@ -216,1 +238,33 @@

};
/**
 * Same as windowScheduler, will batch calls made within a window of time OR when the max batch size is reached.
 *
 * Once the batch holds `maxBatchSize` items the fetch fires immediately;
 * otherwise it is scheduled for the remainder of the `windowMs` window
 * (measured from the first call to the most recent one).
 *
 * @param config: {windowMs: number; maxBatchSize: number;}
 * @returns BatcherScheduler
 */
export const windowedFiniteBatchScheduler: (config: {
  windowMs: number;
  maxBatchSize: number;
}) => BatcherScheduler = (config) => (start, latest, batchSize) =>
  batchSize >= config.maxBatchSize
    ? "immediate"
    : config.windowMs - (latest - start);
/**
 * Will batch calls when the max batch size is reached.
 *
 * Returns "never" until the batch holds `maxBatchSize` items, at which point
 * it returns "immediate" — i.e. the batch waits indefinitely for the size
 * threshold, with no time-based trigger.
 *
 * @param config: {maxBatchSize: number;}
 * @returns BatcherScheduler
 */
export const maxBatchSizeScheduler: (config: {
  maxBatchSize: number;
}) => BatcherScheduler = (config) => (_start, _latest, batchSize) =>
  config.maxBatchSize <= batchSize ? "immediate" : "never";

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc