@logtail/tools npm package: version comparison

Comparing version 0.4.21 to 0.5.0


dist/cjs/batch.js
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.calculateJsonLogSizeBytes = void 0;
+ exports.default = makeBatch;
/*

@@ -56,4 +57,4 @@ * Default buffer size

try {
- await cb(currentBuffer.map(d => d.log));
- currentBuffer.forEach(d => d.resolve(d.log));
+ await cb(currentBuffer.map((d) => d.log));
+ currentBuffer.forEach((d) => d.resolve(d.log));
retry = 0;

@@ -70,3 +71,3 @@ }

}
- currentBuffer.map(d => d.reject(e));
+ currentBuffer.map((d) => d.reject(e));
retry = 0;

@@ -82,3 +83,3 @@ }

}
- return new Promise(resolve => {
+ return new Promise((resolve) => {
timeout = setTimeout(async function () {

@@ -124,3 +125,2 @@ await flush();

}
- exports.default = makeBatch;
//# sourceMappingURL=batch.js.map
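
Apart from the formatting churn (parentheses around single arrow-function parameters), the visible change in this compiled file is that the exports.default = makeBatch assignment moves from the bottom of the file to the top, which is consistent with an updated compiler emit; runtime behaviour is unchanged. For orientation, below is a minimal TypeScript sketch of the buffer-and-flush pattern these hunks come from. The names (makeSimpleBatch, size, cb) and the size-only flush trigger are assumptions, not the package's exact implementation.

// Minimal sketch of the buffer-and-flush pattern seen in batch.js (illustrative only).
type Pending<T> = { log: T; resolve: (value: T) => void; reject: (error: unknown) => void };

function makeSimpleBatch<T>(size: number, cb: (logs: T[]) => Promise<void>) {
  let buffer: Pending<T>[] = [];

  async function flush(): Promise<void> {
    const currentBuffer = buffer;
    buffer = [];
    if (currentBuffer.length === 0) return;
    try {
      // Push every buffered log in one call, then settle each caller's promise.
      await cb(currentBuffer.map((d) => d.log));
      currentBuffer.forEach((d) => d.resolve(d.log));
    } catch (e) {
      currentBuffer.forEach((d) => d.reject(e));
    }
  }

  // Each call enqueues one log and resolves once its batch has been sent.
  return function (log: T): Promise<T> {
    return new Promise<T>((resolve, reject) => {
      buffer.push({ log, resolve, reject });
      if (buffer.length >= size) void flush();
    });
  };
}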

@@ -72,3 +72,3 @@ "use strict";

.get("/")
- .reply(200, new Promise(res => setTimeout(() => res(200), 1003)));
+ .reply(200, new Promise((res) => setTimeout(() => res(200), 1003)));
const called = jest.fn();

@@ -87,3 +87,3 @@ const size = 5;

});
- await Promise.all(logNumberTimes(logger, 5)).catch(e => {
+ await Promise.all(logNumberTimes(logger, 5)).catch((e) => {
throw e;

@@ -105,3 +105,3 @@ });

});
- await Promise.all(logNumberTimes(logger, 5)).catch(e => { });
+ await Promise.all(logNumberTimes(logger, 5)).catch((e) => { });
expect(called).toHaveBeenCalledTimes(4); // 3 retries + 1 initial

@@ -121,3 +121,3 @@ });

});
- logger(getRandomLog()).catch(e => { });
+ logger(getRandomLog()).catch((e) => { });
await batcher.flush();

@@ -136,3 +136,3 @@ expect(called).toHaveBeenCalledTimes(4); // 3 retries + 1 initial

const throttler = throttle(async (logs) => {
- return new Promise(resolve => {
+ return new Promise((resolve) => {
setTimeout(() => resolve(logs), throttleResolveAfter);

@@ -162,3 +162,4 @@ });

const expectedTime = ((numberOfLogs / batchSize) * throttleResolveAfter) / maxThrottle;
- expect(end).toBeGreaterThanOrEqual(expectedTime);
+ const toleranceMilliseconds = 0.2;
+ expect(end).toBeGreaterThanOrEqual(expectedTime - toleranceMilliseconds);
});
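
The only behavioural change in these test hunks is the new 0.2 ms tolerance: the elapsed-time assertion may now undershoot the computed expectation slightly, presumably so that timer and clock rounding cannot make the test flaky. A standalone illustration of the same assertion pattern follows; expectAtLeast is a hypothetical helper, not part of the package.

// Hypothetical helper showing the tolerance pattern used in these tests.
function expectAtLeast(actualMs: number, expectedMs: number, toleranceMs = 0.2): void {
  if (actualMs < expectedMs - toleranceMs) {
    throw new Error(`expected at least ${expectedMs - toleranceMs} ms, measured ${actualMs} ms`);
  }
}

// e.g. 10 logs throttled to 2 concurrent sends of 100 ms each should take
// at least (10 / 2) * 100 = 500 ms, give or take the tolerance.
expectAtLeast(499.9, 500); // passes: within the 0.2 ms tolerance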

@@ -168,3 +169,3 @@ it("should send after flush (with long timeout)", async () => {

.get("/")
- .reply(200, new Promise(res => setTimeout(() => res(200), 1003)));
+ .reply(200, new Promise((res) => setTimeout(() => res(200), 1003)));
const called = jest.fn();

@@ -207,3 +208,3 @@ const size = 50;

// 100 logs with 50B each is 5000B in total - expecting 10 batches of 500B
- await Promise.all(logNumberTimes(logger, 100)).catch(e => {
+ await Promise.all(logNumberTimes(logger, 100)).catch((e) => {
throw e;

@@ -210,0 +211,0 @@ });

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
+ exports.default = makeBurstProtection;
const RESOLUTION = 64;

@@ -25,6 +26,3 @@ /**

const intervalCountSinceLast = Math.floor((now - lastIntervalTime) / intervalLength);
- callCounts = Array(Math.min(intervalCountSinceLast, RESOLUTION))
- .fill(0)
- .concat(callCounts)
- .slice(0, RESOLUTION);
+ callCounts = Array(Math.min(intervalCountSinceLast, RESOLUTION)).fill(0).concat(callCounts).slice(0, RESOLUTION);
lastIntervalTime += intervalCountSinceLast * intervalLength;

@@ -53,3 +51,2 @@ }

}
- exports.default = makeBurstProtection;
//# sourceMappingURL=burstProtection.js.map
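
The burst-protection hunk only collapses a chained expression onto one line; behaviour is identical. As a reading aid, here is that expression in isolation with an illustrative input (the standalone function name is made up for the example):

// Sliding window of per-interval call counts, capped at RESOLUTION buckets.
const RESOLUTION = 64;

function shiftCallCounts(callCounts: number[], intervalCountSinceLast: number): number[] {
  // Prepend one zero bucket per elapsed interval, then trim back to RESOLUTION entries.
  return Array(Math.min(intervalCountSinceLast, RESOLUTION))
    .fill(0)
    .concat(callCounts)
    .slice(0, RESOLUTION);
}

// Two intervals elapsed with no calls: two fresh zero buckets are pushed in front.
console.log(shiftCallCounts([5, 3, 1], 2)); // [0, 0, 5, 3, 1]
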
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
- exports.base64Encode = void 0;
+ exports.base64Encode = base64Encode;
/**

@@ -12,3 +12,2 @@ * Converts a plain-text string to a base64 string

}
- exports.base64Encode = base64Encode;
//# sourceMappingURL=encode.js.map
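
The encode.js change is again only about where the CommonJS export assignment sits; per its doc comment, the helper converts a plain-text string to a base64 string. A minimal Node-flavoured sketch follows; this is not necessarily the package's actual implementation.

// Plain-text string to base64, as a sketch only.
function base64EncodeSketch(plainText: string): string {
  return Buffer.from(plainText, "utf-8").toString("base64");
}

console.log(base64EncodeSketch("hello")); // "aGVsbG8="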

@@ -27,3 +27,3 @@ "use strict";

// against the values from the queue
- list.forEach(item => {
+ list.forEach((item) => {
expect(q.shift()).toEqual(item);

@@ -30,0 +30,0 @@ });
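
The queue test hunk is pure formatting; the assertion simply drains a FIFO queue and checks ordering. Below is a minimal sketch of a queue that satisfies the shift() usage shown here; it is illustrative and not the package's Queue class.

// Illustrative FIFO queue backed by an array.
class SimpleQueue<T> {
  private items: T[] = [];

  // Add to the back of the queue.
  push(item: T): void {
    this.items.push(item);
  }

  // Remove and return the front item, or undefined when empty.
  shift(): T | undefined {
    return this.items.shift();
  }

  get length(): number {
    return this.items.length;
  }
}

const q = new SimpleQueue<number>();
[1, 2, 3].forEach((item) => q.push(item));
[1, 2, 3].forEach((item) => {
  // Items come back out in insertion order.
  console.log(q.shift() === item); // true, true, true
});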

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
+ exports.default = makeRetry;
/**

@@ -13,3 +14,3 @@ * maximum number of tries to push logs to logtail

function delay(sec) {
- return new Promise(resolve => setTimeout(resolve, sec * 1000));
+ return new Promise((resolve) => setTimeout(resolve, sec * 1000));
}

@@ -62,3 +63,2 @@ /**

}
- exports.default = makeRetry;
//# sourceMappingURL=retry.js.map
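
The retry helper's hunks show a simple delay(sec) promise and the usual export relocation. Below is a hedged sketch of the retry-with-delay idea the tests exercise (the batch tests above expect 4 calls in total, i.e. 3 retries + 1 initial). The wrapper name, the attempt count, and the fixed one-second pause are assumptions, not the package's exact policy.

// Wait the given number of seconds, mirroring the delay() shown in the diff.
function delay(sec: number): Promise<void> {
  return new Promise((resolve) => setTimeout(resolve, sec * 1000));
}

// Hypothetical retry wrapper: try fn up to maxTries times, pausing between attempts.
function makeSimpleRetry<T, R>(fn: (arg: T) => Promise<R>, maxTries = 4, delaySec = 1) {
  return async function (arg: T): Promise<R> {
    let lastError: unknown;
    for (let attempt = 1; attempt <= maxTries; attempt++) {
      try {
        return await fn(arg);
      } catch (e) {
        lastError = e;
        if (attempt < maxTries) await delay(delaySec);
      }
    }
    throw lastError;
  };
}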

@@ -39,5 +39,3 @@ "use strict";

const logs = [getRandomLog()];
- (0, nock_1.default)("http://example.com")
- .get("/")
- .reply(200, logs);
+ (0, nock_1.default)("http://example.com").get("/").reply(200, logs);
const sync = makeSync(called);

@@ -51,7 +49,3 @@ const retry = await (0, retry_1.default)(sync);

const logs = [getRandomLog()];
- (0, nock_1.default)("http://example.com")
- .get("/")
- .reply(500, "Bad")
- .get("/")
- .reply(200, logs);
+ (0, nock_1.default)("http://example.com").get("/").reply(500, "Bad").get("/").reply(200, logs);
const sync = makeSync(called);

@@ -65,9 +59,3 @@ const retry = await (0, retry_1.default)(sync);

const logs = [getRandomLog()];
- (0, nock_1.default)("http://example.com")
- .get("/")
- .reply(500, "Bad")
- .get("/")
- .reply(500, "Bad")
- .get("/")
- .reply(200, logs);
+ (0, nock_1.default)("http://example.com").get("/").reply(500, "Bad").get("/").reply(500, "Bad").get("/").reply(200, logs);
const sync = makeSync(called);

@@ -74,0 +62,0 @@ const retry = await (0, retry_1.default)(sync);

@@ -6,2 +6,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
+ exports.default = makeThrottle;
const queue_1 = __importDefault(require("./queue"));

@@ -61,3 +62,2 @@ /**

}
- exports.default = makeThrottle;
//# sourceMappingURL=throttle.js.map
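
throttle.js gets the same export relocation. Since the surrounding test hunks assert that total time scales with numberOfPromises / max, the helper behaves like a max-concurrency limiter. Below is a hedged TypeScript sketch of that idea, written as a plain semaphore; the real makeThrottle uses its own Queue, so treat this as an illustration, not the package's implementation.

// Illustrative concurrency limiter: at most max wrapped calls run at once.
function makeSimpleThrottle(max: number) {
  let active = 0;
  const waiting: Array<() => void> = [];

  const acquire = (): Promise<void> => {
    if (active < max) {
      active++;
      return Promise.resolve();
    }
    // No free slot: queue up until one is handed over.
    return new Promise<void>((resolve) => waiting.push(resolve));
  };

  const release = (): void => {
    const next = waiting.shift();
    if (next) {
      next(); // hand the slot straight to the next waiter; active stays the same
    } else {
      active--;
    }
  };

  return function throttle<A, R>(fn: (arg: A) => Promise<R>) {
    return async (arg: A): Promise<R> => {
      await acquire();
      try {
        return await fn(arg);
      } finally {
        release();
      }
    };
  };
}

With max concurrent slots and a pipeline that resolves after throttleTime, numberOfPromises calls take roughly (numberOfPromises / max) * throttleTime, which is the expectation asserted in the throttle tests below.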

@@ -16,3 +16,3 @@ "use strict";

// Create the pipeline function to use the throttle
- const pipeline = throttle(async (log) => new Promise(resolve => {
+ const pipeline = throttle(async (log) => new Promise((resolve) => {
setTimeout(() => {

@@ -35,3 +35,4 @@ resolve(log);

const expectedTime = (numberOfPromises / max) * throttleTime;
- expect(end).toBeGreaterThanOrEqual(expectedTime);
+ const toleranceMilliseconds = 0.2;
+ expect(end).toBeGreaterThanOrEqual(expectedTime - toleranceMilliseconds);
});

@@ -38,0 +39,0 @@ it("should handle rejections", async () => {

@@ -52,4 +52,4 @@ /*

try {
- await cb(currentBuffer.map(d => d.log));
- currentBuffer.forEach(d => d.resolve(d.log));
+ await cb(currentBuffer.map((d) => d.log));
+ currentBuffer.forEach((d) => d.resolve(d.log));
retry = 0;

@@ -66,3 +66,3 @@ }

}
- currentBuffer.map(d => d.reject(e));
+ currentBuffer.map((d) => d.reject(e));
retry = 0;

@@ -78,3 +78,3 @@ }

}
- return new Promise(resolve => {
+ return new Promise((resolve) => {
timeout = setTimeout(async function () {

@@ -81,0 +81,0 @@ await flush();

@@ -44,3 +44,3 @@ import nock from "nock";

.get("/")
- .reply(200, new Promise(res => setTimeout(() => res(200), 1003)));
+ .reply(200, new Promise((res) => setTimeout(() => res(200), 1003)));
const called = jest.fn();

@@ -59,3 +59,3 @@ const size = 5;

});
- await Promise.all(logNumberTimes(logger, 5)).catch(e => {
+ await Promise.all(logNumberTimes(logger, 5)).catch((e) => {
throw e;

@@ -77,3 +77,3 @@ });

});
- await Promise.all(logNumberTimes(logger, 5)).catch(e => { });
+ await Promise.all(logNumberTimes(logger, 5)).catch((e) => { });
expect(called).toHaveBeenCalledTimes(4); // 3 retries + 1 initial

@@ -93,3 +93,3 @@ });

});
- logger(getRandomLog()).catch(e => { });
+ logger(getRandomLog()).catch((e) => { });
await batcher.flush();

@@ -108,3 +108,3 @@ expect(called).toHaveBeenCalledTimes(4); // 3 retries + 1 initial

const throttler = throttle(async (logs) => {
- return new Promise(resolve => {
+ return new Promise((resolve) => {
setTimeout(() => resolve(logs), throttleResolveAfter);

@@ -134,3 +134,4 @@ });

const expectedTime = ((numberOfLogs / batchSize) * throttleResolveAfter) / maxThrottle;
- expect(end).toBeGreaterThanOrEqual(expectedTime);
+ const toleranceMilliseconds = 0.2;
+ expect(end).toBeGreaterThanOrEqual(expectedTime - toleranceMilliseconds);
});

@@ -140,3 +141,3 @@ it("should send after flush (with long timeout)", async () => {

.get("/")
- .reply(200, new Promise(res => setTimeout(() => res(200), 1003)));
+ .reply(200, new Promise((res) => setTimeout(() => res(200), 1003)));
const called = jest.fn();

@@ -179,3 +180,3 @@ const size = 50;

// 100 logs with 50B each is 5000B in total - expecting 10 batches of 500B
- await Promise.all(logNumberTimes(logger, 100)).catch(e => {
+ await Promise.all(logNumberTimes(logger, 100)).catch((e) => {
throw e;

@@ -182,0 +183,0 @@ });

@@ -23,6 +23,3 @@ const RESOLUTION = 64;

const intervalCountSinceLast = Math.floor((now - lastIntervalTime) / intervalLength);
- callCounts = Array(Math.min(intervalCountSinceLast, RESOLUTION))
- .fill(0)
- .concat(callCounts)
- .slice(0, RESOLUTION);
+ callCounts = Array(Math.min(intervalCountSinceLast, RESOLUTION)).fill(0).concat(callCounts).slice(0, RESOLUTION);
lastIntervalTime += intervalCountSinceLast * intervalLength;

@@ -29,0 +26,0 @@ }

@@ -22,3 +22,3 @@ import Queue from "./queue";

// against the values from the queue
- list.forEach(item => {
+ list.forEach((item) => {
expect(q.shift()).toEqual(item);

@@ -25,0 +25,0 @@ });

@@ -11,3 +11,3 @@ /**

function delay(sec) {
- return new Promise(resolve => setTimeout(resolve, sec * 1000));
+ return new Promise((resolve) => setTimeout(resolve, sec * 1000));
}

@@ -14,0 +14,0 @@ /**

@@ -34,5 +34,3 @@ import { LogLevel } from "@logtail/types";

const logs = [getRandomLog()];
nock("http://example.com")
.get("/")
.reply(200, logs);
nock("http://example.com").get("/").reply(200, logs);
const sync = makeSync(called);

@@ -46,7 +44,3 @@ const retry = await makeRetry(sync);

const logs = [getRandomLog()];
nock("http://example.com")
.get("/")
.reply(500, "Bad")
.get("/")
.reply(200, logs);
nock("http://example.com").get("/").reply(500, "Bad").get("/").reply(200, logs);
const sync = makeSync(called);

@@ -60,9 +54,3 @@ const retry = await makeRetry(sync);

const logs = [getRandomLog()];
nock("http://example.com")
.get("/")
.reply(500, "Bad")
.get("/")
.reply(500, "Bad")
.get("/")
.reply(200, logs);
nock("http://example.com").get("/").reply(500, "Bad").get("/").reply(500, "Bad").get("/").reply(200, logs);
const sync = makeSync(called);

@@ -69,0 +57,0 @@ const retry = await makeRetry(sync);

@@ -11,3 +11,3 @@ import makeThrottle from "./throttle";

// Create the pipeline function to use the throttle
- const pipeline = throttle(async (log) => new Promise(resolve => {
+ const pipeline = throttle(async (log) => new Promise((resolve) => {
setTimeout(() => {

@@ -30,3 +30,4 @@ resolve(log);

const expectedTime = (numberOfPromises / max) * throttleTime;
- expect(end).toBeGreaterThanOrEqual(expectedTime);
+ const toleranceMilliseconds = 0.2;
+ expect(end).toBeGreaterThanOrEqual(expectedTime - toleranceMilliseconds);
});

@@ -33,0 +34,0 @@ it("should handle rejections", async () => {

{
"name": "@logtail/tools",
"version": "0.4.21",
"version": "0.5.0",
"description": "Better Stack JavaScript logging tools (formerly Logtail)",

@@ -31,20 +31,12 @@ "main": "dist/cjs/index.js",

"devDependencies": {
"@types/babel__core": "7.0.4",
"@types/babel__traverse": "7.0.4",
"@types/jest": "^24.0.17",
"@types/nock": "^11.1.0",
"@types/node": "^12.7.2",
"@types/source-map": "^0.5.7",
"cross-fetch": "^3.0.4",
"jest": "^24.9.0",
"nock": "^13.3.3",
"npm-run-all": "^4.1.5",
"prettier": "^1.18.2",
"ts-jest": "^24.0.2",
"typescript": "^4.9.5"
"cross-fetch": "^4.0.0",
"nock": "^13.3.3"
},
"dependencies": {
"@logtail/types": "^0.4.20"
"@logtail/types": "^0.5.0",
"cross-fetch": "^4.0.0"
},
"gitHead": "8631392d3c7a619a45911ba33ca7ca6b3038e878"
"gitHead": "206d52b85903715541e5acbb5b5f1c63298b0714"
}
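
Beyond the version bump, the manifest change worth noting is that cross-fetch moves from devDependencies to dependencies (at ^4.0.0) and @logtail/types is bumped to ^0.5.0, so consumers installing 0.5.0 now pull cross-fetch in at runtime rather than only in development. Reassembling the added lines, the resulting dependency block reads:

"dependencies": {
  "@logtail/types": "^0.5.0",
  "cross-fetch": "^4.0.0"
}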

@@ -69,4 +69,4 @@ # [Better Stack](https://betterstack.com/logs) JavaScript client: Helper tools

// Create a basic pipeline function which resolves after 2 seconds
- const pipeline = async log =>
- new Promise(resolve => {
+ const pipeline = async (log) =>
+ new Promise((resolve) => {
setTimeout(() => resolve(log), 2000);

@@ -73,0 +73,0 @@ });

@@ -49,3 +49,3 @@ import nock from "nock";

.get("/")
- .reply(200, new Promise(res => setTimeout(() => res(200), 1003)));
+ .reply(200, new Promise((res) => setTimeout(() => res(200), 1003)));

@@ -66,3 +66,3 @@ const called = jest.fn();

- await Promise.all(logNumberTimes(logger, 5)).catch(e => {
+ await Promise.all(logNumberTimes(logger, 5)).catch((e) => {
throw e;

@@ -87,3 +87,3 @@ });

- await Promise.all(logNumberTimes(logger, 5)).catch(e => {});
+ await Promise.all(logNumberTimes(logger, 5)).catch((e) => {});
expect(called).toHaveBeenCalledTimes(4); // 3 retries + 1 initial

@@ -106,3 +106,3 @@ });

- logger(getRandomLog()).catch(e => {});
+ logger(getRandomLog()).catch((e) => {});
await batcher.flush();

@@ -124,4 +124,4 @@

// Resolve the throttler after 1 second
- const throttler = throttle(async logs => {
- return new Promise(resolve => {
+ const throttler = throttle(async (logs) => {
+ return new Promise((resolve) => {
setTimeout(() => resolve(logs), throttleResolveAfter);

@@ -159,4 +159,5 @@ });

const expectedTime = ((numberOfLogs / batchSize) * throttleResolveAfter) / maxThrottle;
+ const toleranceMilliseconds = 0.2;
- expect(end).toBeGreaterThanOrEqual(expectedTime);
+ expect(end).toBeGreaterThanOrEqual(expectedTime - toleranceMilliseconds);
});

@@ -167,3 +168,3 @@

.get("/")
- .reply(200, new Promise(res => setTimeout(() => res(200), 1003)));
+ .reply(200, new Promise((res) => setTimeout(() => res(200), 1003)));

@@ -211,3 +212,3 @@ const called = jest.fn();

// 100 logs with 50B each is 5000B in total - expecting 10 batches of 500B
- await Promise.all(logNumberTimes(logger, 100)).catch(e => {
+ await Promise.all(logNumberTimes(logger, 100)).catch((e) => {
throw e;

@@ -214,0 +215,0 @@ });

@@ -79,4 +79,4 @@ import { ILogtailLog } from "@logtail/types";

try {
- await cb(currentBuffer.map(d => d.log));
- currentBuffer.forEach(d => d.resolve(d.log));
+ await cb(currentBuffer.map((d) => d.log));
+ currentBuffer.forEach((d) => d.resolve(d.log));
retry = 0;

@@ -92,3 +92,3 @@ } catch (e) {

}
- currentBuffer.map(d => d.reject(e));
+ currentBuffer.map((d) => d.reject(e));
retry = 0;

@@ -106,4 +106,4 @@ }

- return new Promise<void>(resolve => {
- timeout = setTimeout(async function() {
+ return new Promise<void>((resolve) => {
+ timeout = setTimeout(async function () {
await flush();

@@ -120,3 +120,3 @@ resolve();

return {
- initPusher: function(fn: Function) {
+ initPusher: function (fn: Function) {
cb = fn;

@@ -128,3 +128,3 @@

*/
- return async function(log: ILogtailLog): Promise<ILogtailLog> {
+ return async function (log: ILogtailLog): Promise<ILogtailLog> {
return new Promise<ILogtailLog>(async (resolve, reject) => {

@@ -131,0 +131,0 @@ buffer.push({ log, resolve, reject });

@@ -34,6 +34,3 @@ import { InferArgs } from "./types";

const intervalCountSinceLast = Math.floor((now - lastIntervalTime) / intervalLength);
- callCounts = Array(Math.min(intervalCountSinceLast, RESOLUTION))
- .fill(0)
- .concat(callCounts)
- .slice(0, RESOLUTION);
+ callCounts = Array(Math.min(intervalCountSinceLast, RESOLUTION)).fill(0).concat(callCounts).slice(0, RESOLUTION);
lastIntervalTime += intervalCountSinceLast * intervalLength;

@@ -40,0 +37,0 @@ }

@@ -28,3 +28,3 @@ import Queue from "./queue";

// against the values from the queue
- list.forEach(item => {
+ list.forEach((item) => {
expect(q.shift()).toEqual(item);

@@ -31,0 +31,0 @@ });

@@ -37,5 +37,3 @@ import { ILogtailLog, LogLevel } from "@logtail/types";

nock("http://example.com")
.get("/")
.reply(200, logs);
nock("http://example.com").get("/").reply(200, logs);

@@ -52,7 +50,3 @@ const sync = makeSync(called);

nock("http://example.com")
.get("/")
.reply(500, "Bad")
.get("/")
.reply(200, logs);
nock("http://example.com").get("/").reply(500, "Bad").get("/").reply(200, logs);

@@ -69,9 +63,3 @@ const sync = makeSync(called);

nock("http://example.com")
.get("/")
.reply(500, "Bad")
.get("/")
.reply(500, "Bad")
.get("/")
.reply(200, logs);
nock("http://example.com").get("/").reply(500, "Bad").get("/").reply(500, "Bad").get("/").reply(200, logs);

@@ -78,0 +66,0 @@ const sync = makeSync(called);

@@ -14,3 +14,3 @@ import { ILogtailLog } from "@logtail/types";

function delay(sec: number): Promise<any> {
- return new Promise(resolve => setTimeout(resolve, sec * 1000));
+ return new Promise((resolve) => setTimeout(resolve, sec * 1000));
}

@@ -17,0 +17,0 @@

@@ -23,4 +23,4 @@ import makeThrottle from "./throttle";

const pipeline = throttle(
- async log =>
- new Promise<ILog>(resolve => {
+ async (log) =>
+ new Promise<ILog>((resolve) => {
setTimeout(() => {

@@ -50,4 +50,5 @@ resolve(log);

const expectedTime = (numberOfPromises / max) * throttleTime;
+ const toleranceMilliseconds = 0.2;
- expect(end).toBeGreaterThanOrEqual(expectedTime);
+ expect(end).toBeGreaterThanOrEqual(expectedTime - toleranceMilliseconds);
});

@@ -66,3 +67,3 @@

// Create a throttled function that will throw half the time
- const pipeline = throttle(async i => {
+ const pipeline = throttle(async (i) => {
if (i % 2 == 0) {

@@ -69,0 +70,0 @@ throw new Error("Thrown inside throttled function!");

