@timberio/tools - npm Package Compare versions

Comparing version 0.22.0 to 0.23.0

dist/cjs/batch.js
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
/*
- * Min buffer size to gracefully fix a bad value with an obvious default.
- * So caller can not defeat the purpose of batching.
+ * Default buffer size
*/
- const MIN_BUFFER_SIZE = 5;
+ const DEFAULT_BUFFER_SIZE = 5;
/*
- * Min buffer size to flush when limit is about to be reached.
+ * Default flush timeout
*/
- const MAX_BUFFER_SIZE = 100;
- /*
-  * Min timeout to gracefully fix a bad value with an obvious default.
-  * So caller can not defeat the purpose of batching.
-  */
- const MIN_FLUSH_TIMEOUT = 1000;
+ const DEFAULT_FLUSH_TIMEOUT = 1000;
/**

@@ -23,11 +17,3 @@ * batch the buffer coming in, process them and then resolve

*/
- function makeBatch(size = MIN_BUFFER_SIZE, flushTimeout = MIN_FLUSH_TIMEOUT) {
- if (size < MIN_BUFFER_SIZE) {
- console.warn(`warning: Gracefully fixing bad value of batch size to default ${MIN_BUFFER_SIZE}`);
- size = MIN_BUFFER_SIZE;
- }
- if (flushTimeout < MIN_FLUSH_TIMEOUT) {
- console.warn(`warning: Gracefully fixing bad value of timeout to default ${MIN_FLUSH_TIMEOUT}`);
- flushTimeout = MIN_FLUSH_TIMEOUT;
- }
+ function makeBatch(size = DEFAULT_BUFFER_SIZE, flushTimeout = DEFAULT_FLUSH_TIMEOUT) {
let timeout;

@@ -77,3 +63,3 @@ let cb;

buffer.push({ log, resolve, reject });
- if (buffer.length >= size || buffer.length === MAX_BUFFER_SIZE - 1) {
+ if (buffer.length >= size) {
await flush();

@@ -80,0 +66,0 @@ }
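
Note on the hunk above: in 0.22.0 makeBatch clamped out-of-range arguments to the MIN_* constants and also flushed early once the buffer hit MAX_BUFFER_SIZE - 1; from 0.23.0 the caller's size and flushTimeout are used as given, and a flush happens only when the buffer reaches size or when flushTimeout elapses. The following is a minimal TypeScript sketch of that size-or-timeout batching pattern, for illustration only; it is not the package's source, and the Log type plus the makeBatchSketch/batcher/logger names are placeholders (the real package types logs as ITimberLog).

// Minimal sketch of the size-or-timeout batching pattern (assumption: a simplified
// stand-in for @timberio/tools' makeBatch, with `Log` replacing ITimberLog).
type Log = Record<string, unknown>;

function makeBatchSketch(size = 5, flushTimeout = 1000) {
  type Pending = { log: Log; resolve: (log: Log) => void; reject: (e: Error) => void };

  return function batcher(fn: (batch: Log[]) => void | Promise<void>) {
    let buffer: Pending[] = [];
    let timer: ReturnType<typeof setTimeout> | null = null;

    async function flush(): Promise<void> {
      if (timer) {
        clearTimeout(timer);
        timer = null;
      }
      const pending = buffer;
      buffer = [];
      try {
        await fn(pending.map(p => p.log)); // process the whole batch at once
        pending.forEach(p => p.resolve(p.log));
      } catch (e) {
        pending.forEach(p => p.reject(e as Error));
      }
    }

    return function logger(log: Log): Promise<Log> {
      return new Promise<Log>((resolve, reject) => {
        buffer.push({ log, resolve, reject });
        if (buffer.length >= size) {
          void flush(); // the 0.23.0 condition: flush only on reaching `size`
        } else if (!timer) {
          timer = setTimeout(() => void flush(), flushTimeout); // or after the timeout
        }
      });
    };
  };
}

// Hypothetical usage:
// const logger = makeBatchSketch(5, 1000)(batch => console.log(batch.length));
// await logger({ message: "hello" });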

@@ -16,3 +16,2 @@ "use strict";

return {
$schema: "https://raw.githubusercontent.com/timberio/log-event-json-schema/v4.1.0/schema.json",
dt: new Date(),

@@ -41,76 +40,2 @@ level: types_1.LogLevel.Info,

describe("batch tests", () => {
it("should log warning if buffer size is lower than 5", () => {
const size = 3;
const sendTimeout = 1000;
const expectedWarning = "warning: Gracefully fixing bad value of batch size to default 5";
const warning = jest.spyOn(global.console, "warn");
const batcher = batch_1.default(size, sendTimeout);
expect(warning).toBeCalledWith(expectedWarning);
warning.mockRestore();
});
it("should log warning if flush timeout is lower than 1000", () => {
const size = 5;
const sendTimeout = 999;
const expectedWarning = "warning: Gracefully fixing bad value of timeout to default 1000";
const warning = jest.spyOn(global.console, "warn");
jest.spyOn(global.console, "warn");
const batcher = batch_1.default(size, sendTimeout);
expect(console.warn).toBeCalledWith(expectedWarning);
warning.mockRestore();
});
it("should use default buffer size value 5 when size is passed as undefined", () => {
const size = undefined;
const sendTimeout = 1000;
const warning = jest.spyOn(global.console, "warn");
const batcher = batch_1.default(size, sendTimeout);
expect(console.warn).toHaveBeenCalledTimes(0);
warning.mockRestore();
});
it("should use default flush timeout value 1000 when passed as undefined", () => {
const size = 5;
const sendTimeout = undefined;
const warning = jest.spyOn(global.console, "warn");
const batcher = batch_1.default(size, sendTimeout);
expect(console.warn).toHaveBeenCalledTimes(0);
warning.mockRestore();
});
it("should default to size of 5, if size is less than 5", async () => {
const size = 4;
const sendTimeout = 1000;
const batcher = batch_1.default(size, sendTimeout);
const logger = batcher((batch) => {
expect(batch.length).toEqual(5);
});
await Promise.all(logNumberTimes(logger, 5)).catch(e => {
throw e;
});
}, 1100);
it("should default to timeout of 1 sec, if timeout is less than that", done => {
const size = 6;
const sendTimeout = 10;
const batcher = batch_1.default(size, sendTimeout);
const logger = batcher((batch) => {
expect([2, 1].includes(batch.length)).toBeTruthy();
done();
});
logger(getRandomLog()).catch(e => {
throw e;
});
setTimeout(() => logger(getRandomLog()), 500);
setTimeout(() => logger(getRandomLog()), 1001);
}, 2100);
it("should flush the batch when batch length is one less than max possible size.", done => {
const size = 200;
const sendTimeout = 10;
const batcher = batch_1.default(size, sendTimeout);
const logger = batcher((batch) => {
expect([99, 2].includes(batch.length)).toBeTruthy();
done();
});
for (let i = 0; i <= 100; i++) {
logger(getRandomLog()).catch(e => {
throw e;
});
}
}, 1100);
it("should not fire timeout while a send was happening.", async (done) => {

@@ -140,26 +65,2 @@ nock_1.default("http://example.com")

});
it("should handle another log that comes in while it's sending...", async (done) => {
nock_1.default("http://example.com")
.get("/")
.reply(200, new Promise(res => setTimeout(() => res(200), 1003)));
const called = jest.fn();
const size = 5;
const sendTimeout = 10;
const batcher = batch_1.default(size, sendTimeout);
const logger = batcher(async () => {
called();
try {
await cross_fetch_1.default("http://example.com");
}
catch (e) {
throw e;
}
});
await Promise.all(logNumberTimes(logger, 6)).catch(e => {
throw e;
});
expect(called).toHaveBeenCalledTimes(2);
nock_1.default.restore();
done();
});
it("should play nicely with `throttle`", async () => {

@@ -166,0 +67,0 @@ // Fixtures

@@ -15,3 +15,2 @@ "use strict";

return {
$schema: "https://raw.githubusercontent.com/timberio/log-event-json-schema/v4.1.0/schema.json",
dt: new Date(),

@@ -18,0 +17,0 @@ level: types_1.LogLevel.Info,

/*
- * Min buffer size to gracefully fix a bad value with an obvious default.
- * So caller can not defeat the purpose of batching.
+ * Default buffer size
*/
- const MIN_BUFFER_SIZE = 5;
+ const DEFAULT_BUFFER_SIZE = 5;
/*
- * Min buffer size to flush when limit is about to be reached.
+ * Default flush timeout
*/
- const MAX_BUFFER_SIZE = 100;
- /*
-  * Min timeout to gracefully fix a bad value with an obvious default.
-  * So caller can not defeat the purpose of batching.
-  */
- const MIN_FLUSH_TIMEOUT = 1000;
+ const DEFAULT_FLUSH_TIMEOUT = 1000;
/**

@@ -21,11 +15,3 @@ * batch the buffer coming in, process them and then resolve

*/
- export default function makeBatch(size = MIN_BUFFER_SIZE, flushTimeout = MIN_FLUSH_TIMEOUT) {
- if (size < MIN_BUFFER_SIZE) {
- console.warn(`warning: Gracefully fixing bad value of batch size to default ${MIN_BUFFER_SIZE}`);
- size = MIN_BUFFER_SIZE;
- }
- if (flushTimeout < MIN_FLUSH_TIMEOUT) {
- console.warn(`warning: Gracefully fixing bad value of timeout to default ${MIN_FLUSH_TIMEOUT}`);
- flushTimeout = MIN_FLUSH_TIMEOUT;
- }
+ export default function makeBatch(size = DEFAULT_BUFFER_SIZE, flushTimeout = DEFAULT_FLUSH_TIMEOUT) {
let timeout;

@@ -75,3 +61,3 @@ let cb;

buffer.push({ log, resolve, reject });
- if (buffer.length >= size || buffer.length === MAX_BUFFER_SIZE - 1) {
+ if (buffer.length >= size) {
await flush();

@@ -78,0 +64,0 @@ }

@@ -11,3 +11,2 @@ import nock from "nock";

return {
$schema: "https://raw.githubusercontent.com/timberio/log-event-json-schema/v4.1.0/schema.json",
dt: new Date(),

@@ -36,76 +35,2 @@ level: LogLevel.Info,

describe("batch tests", () => {
it("should log warning if buffer size is lower than 5", () => {
const size = 3;
const sendTimeout = 1000;
const expectedWarning = "warning: Gracefully fixing bad value of batch size to default 5";
const warning = jest.spyOn(global.console, "warn");
const batcher = makeBatch(size, sendTimeout);
expect(warning).toBeCalledWith(expectedWarning);
warning.mockRestore();
});
it("should log warning if flush timeout is lower than 1000", () => {
const size = 5;
const sendTimeout = 999;
const expectedWarning = "warning: Gracefully fixing bad value of timeout to default 1000";
const warning = jest.spyOn(global.console, "warn");
jest.spyOn(global.console, "warn");
const batcher = makeBatch(size, sendTimeout);
expect(console.warn).toBeCalledWith(expectedWarning);
warning.mockRestore();
});
it("should use default buffer size value 5 when size is passed as undefined", () => {
const size = undefined;
const sendTimeout = 1000;
const warning = jest.spyOn(global.console, "warn");
const batcher = makeBatch(size, sendTimeout);
expect(console.warn).toHaveBeenCalledTimes(0);
warning.mockRestore();
});
it("should use default flush timeout value 1000 when passed as undefined", () => {
const size = 5;
const sendTimeout = undefined;
const warning = jest.spyOn(global.console, "warn");
const batcher = makeBatch(size, sendTimeout);
expect(console.warn).toHaveBeenCalledTimes(0);
warning.mockRestore();
});
it("should default to size of 5, if size is less than 5", async () => {
const size = 4;
const sendTimeout = 1000;
const batcher = makeBatch(size, sendTimeout);
const logger = batcher((batch) => {
expect(batch.length).toEqual(5);
});
await Promise.all(logNumberTimes(logger, 5)).catch(e => {
throw e;
});
}, 1100);
it("should default to timeout of 1 sec, if timeout is less than that", done => {
const size = 6;
const sendTimeout = 10;
const batcher = makeBatch(size, sendTimeout);
const logger = batcher((batch) => {
expect([2, 1].includes(batch.length)).toBeTruthy();
done();
});
logger(getRandomLog()).catch(e => {
throw e;
});
setTimeout(() => logger(getRandomLog()), 500);
setTimeout(() => logger(getRandomLog()), 1001);
}, 2100);
it("should flush the batch when batch length is one less than max possible size.", done => {
const size = 200;
const sendTimeout = 10;
const batcher = makeBatch(size, sendTimeout);
const logger = batcher((batch) => {
expect([99, 2].includes(batch.length)).toBeTruthy();
done();
});
for (let i = 0; i <= 100; i++) {
logger(getRandomLog()).catch(e => {
throw e;
});
}
}, 1100);
it("should not fire timeout while a send was happening.", async (done) => {

@@ -135,26 +60,2 @@ nock("http://example.com")

});
it("should handle another log that comes in while it's sending...", async (done) => {
nock("http://example.com")
.get("/")
.reply(200, new Promise(res => setTimeout(() => res(200), 1003)));
const called = jest.fn();
const size = 5;
const sendTimeout = 10;
const batcher = makeBatch(size, sendTimeout);
const logger = batcher(async () => {
called();
try {
await fetch("http://example.com");
}
catch (e) {
throw e;
}
});
await Promise.all(logNumberTimes(logger, 6)).catch(e => {
throw e;
});
expect(called).toHaveBeenCalledTimes(2);
nock.restore();
done();
});
it("should play nicely with `throttle`", async () => {

@@ -161,0 +62,0 @@ // Fixtures

@@ -10,3 +10,2 @@ import { LogLevel } from "@timberio/types";

return {
$schema: "https://raw.githubusercontent.com/timberio/log-event-json-schema/v4.1.0/schema.json",
dt: new Date(),

@@ -13,0 +12,0 @@ level: LogLevel.Info,

# License
- Copyright (c) 2018. Timber Technologies, Inc.
+ Copyright (c) 2018-2019. Timber Technologies, Inc.

@@ -5,0 +5,0 @@ Permission to use, copy, modify, and/or distribute this software for any purpose

{
"name": "@timberio/tools",
"version": "0.22.0",
"version": "0.23.0",
"description": "Javascript logging tools",

@@ -36,6 +36,6 @@ "main": "dist/cjs/index.js",

},
"gitHead": "40597144b884fdb8481049eac289b7ff9015a671",
"gitHead": "32341c6209e9b0a309142cccccffd7eb90284da4",
"dependencies": {
"@timberio/types": "^0.22.0"
"@timberio/types": "^0.23.0"
}
}

@@ -12,4 +12,2 @@ import nock from "nock";

return {
- $schema:
- "https://raw.githubusercontent.com/timberio/log-event-json-schema/v4.1.0/schema.json",
dt: new Date(),

@@ -41,100 +39,2 @@ level: LogLevel.Info,

describe("batch tests", () => {
it("should log warning if buffer size is lower than 5", () => {
const size = 3;
const sendTimeout = 1000;
const expectedWarning =
"warning: Gracefully fixing bad value of batch size to default 5";
const warning = jest.spyOn(global.console, "warn");
const batcher = makeBatch(size, sendTimeout);
expect(warning).toBeCalledWith(expectedWarning);
warning.mockRestore();
});
it("should log warning if flush timeout is lower than 1000", () => {
const size = 5;
const sendTimeout = 999;
const expectedWarning =
"warning: Gracefully fixing bad value of timeout to default 1000";
const warning = jest.spyOn(global.console, "warn");
jest.spyOn(global.console, "warn");
const batcher = makeBatch(size, sendTimeout);
expect(console.warn).toBeCalledWith(expectedWarning);
warning.mockRestore();
});
it("should use default buffer size value 5 when size is passed as undefined", () => {
const size = undefined;
const sendTimeout = 1000;
const warning = jest.spyOn(global.console, "warn");
const batcher = makeBatch(size, sendTimeout);
expect(console.warn).toHaveBeenCalledTimes(0);
warning.mockRestore();
});
it("should use default flush timeout value 1000 when passed as undefined", () => {
const size = 5;
const sendTimeout = undefined;
const warning = jest.spyOn(global.console, "warn");
const batcher = makeBatch(size, sendTimeout);
expect(console.warn).toHaveBeenCalledTimes(0);
warning.mockRestore();
});
it("should default to size of 5, if size is less than 5", async () => {
const size = 4;
const sendTimeout = 1000;
const batcher = makeBatch(size, sendTimeout);
const logger = batcher((batch: ITimberLog[]) => {
expect(batch.length).toEqual(5);
});
await Promise.all(logNumberTimes(logger, 5)).catch(e => {
throw e;
});
}, 1100);
it("should default to timeout of 1 sec, if timeout is less than that", done => {
const size = 6;
const sendTimeout = 10;
const batcher = makeBatch(size, sendTimeout);
const logger = batcher((batch: ITimberLog[]) => {
expect([2, 1].includes(batch.length)).toBeTruthy();
done();
});
logger(getRandomLog()).catch(e => {
throw e;
});
setTimeout(() => logger(getRandomLog()), 500);
setTimeout(() => logger(getRandomLog()), 1001);
}, 2100);
it("should flush the batch when batch length is one less than max possible size.", done => {
const size = 200;
const sendTimeout = 10;
const batcher = makeBatch(size, sendTimeout);
const logger = batcher((batch: ITimberLog[]) => {
expect([99, 2].includes(batch.length)).toBeTruthy();
done();
});
for (let i = 0; i <= 100; i++) {
logger(getRandomLog()).catch(e => {
throw e;
});
}
}, 1100);
it("should not fire timeout while a send was happening.", async done => {

@@ -167,29 +67,2 @@ nock("http://example.com")

it("should handle another log that comes in while it's sending...", async done => {
nock("http://example.com")
.get("/")
.reply(200, new Promise(res => setTimeout(() => res(200), 1003)));
const called = jest.fn();
const size = 5;
const sendTimeout = 10;
const batcher = makeBatch(size, sendTimeout);
const logger = batcher(async () => {
called();
try {
await fetch("http://example.com");
} catch (e) {
throw e;
}
});
await Promise.all(logNumberTimes(logger, 6)).catch(e => {
throw e;
});
expect(called).toHaveBeenCalledTimes(2);
nock.restore();
done();
});
it("should play nicely with `throttle`", async () => {

@@ -196,0 +69,0 @@ // Fixtures

@@ -15,18 +15,11 @@ import { ITimberLog } from "@timberio/types";

/*
- * Min buffer size to gracefully fix a bad value with an obvious default.
- * So caller can not defeat the purpose of batching.
+ * Default buffer size
*/
- const MIN_BUFFER_SIZE = 5;
+ const DEFAULT_BUFFER_SIZE = 5;
/*
- * Min buffer size to flush when limit is about to be reached.
+ * Default flush timeout
*/
- const MAX_BUFFER_SIZE = 100;
+ const DEFAULT_FLUSH_TIMEOUT = 1000;
- /*
-  * Min timeout to gracefully fix a bad value with an obvious default.
-  * So caller can not defeat the purpose of batching.
-  */
- const MIN_FLUSH_TIMEOUT = 1000;
/**

@@ -39,18 +32,5 @@ * batch the buffer coming in, process them and then resolve

export default function makeBatch(
- size: number = MIN_BUFFER_SIZE,
- flushTimeout: number = MIN_FLUSH_TIMEOUT
+ size: number = DEFAULT_BUFFER_SIZE,
+ flushTimeout: number = DEFAULT_FLUSH_TIMEOUT
) {
- if (size < MIN_BUFFER_SIZE) {
- console.warn(
- `warning: Gracefully fixing bad value of batch size to default ${MIN_BUFFER_SIZE}`
- );
- size = MIN_BUFFER_SIZE;
- }
- if (flushTimeout < MIN_FLUSH_TIMEOUT) {
- console.warn(
- `warning: Gracefully fixing bad value of timeout to default ${MIN_FLUSH_TIMEOUT}`
- );
- flushTimeout = MIN_FLUSH_TIMEOUT;
- }
let timeout: NodeJS.Timeout | null;

@@ -106,3 +86,3 @@ let cb: Function;

- if (buffer.length >= size || buffer.length === MAX_BUFFER_SIZE - 1) {
+ if (buffer.length >= size) {
await flush();

@@ -109,0 +89,0 @@ } else {
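
The validation removed above is the behavioural core of this release: before 0.23.0, makeBatch corrected undersized arguments and warned on the console; afterwards they are honoured verbatim. Restated as a standalone helper for illustration only (clampBatchOptions is a hypothetical name, not an export of the package; the constants mirror the deleted MIN_* values):

// Restatement of the validation removed in 0.23.0 (hypothetical helper,
// not part of @timberio/tools).
const MIN_BUFFER_SIZE = 5;
const MIN_FLUSH_TIMEOUT = 1000;

function clampBatchOptions(size: number, flushTimeout: number) {
  if (size < MIN_BUFFER_SIZE) {
    console.warn(`warning: Gracefully fixing bad value of batch size to default ${MIN_BUFFER_SIZE}`);
    size = MIN_BUFFER_SIZE;
  }
  if (flushTimeout < MIN_FLUSH_TIMEOUT) {
    console.warn(`warning: Gracefully fixing bad value of timeout to default ${MIN_FLUSH_TIMEOUT}`);
    flushTimeout = MIN_FLUSH_TIMEOUT;
  }
  return { size, flushTimeout };
}

// Old behaviour: clampBatchOptions(3, 10) -> { size: 5, flushTimeout: 1000 }, plus two warnings.
// From 0.23.0, makeBatch(3, 10) uses 3 and 10 exactly as passed.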

@@ -11,4 +11,2 @@ import { ITimberLog, LogLevel } from "@timberio/types";

return {
- $schema:
- "https://raw.githubusercontent.com/timberio/log-event-json-schema/v4.1.0/schema.json",
dt: new Date(),

@@ -15,0 +13,0 @@ level: LogLevel.Info,
